Diffstat (limited to 'thirdparty')
-rw-r--r--  thirdparty/README.md | 48
-rw-r--r--  thirdparty/assimp/contrib/utf8cpp/doc/ReleaseNotes | 12
-rw-r--r--  thirdparty/assimp/contrib/utf8cpp/doc/utf8cpp.html | 1789
-rw-r--r--  thirdparty/assimp/include/assimp/config.h (renamed from thirdparty/assimp/assimp/config.h) | 291
-rw-r--r--  thirdparty/assimp/revision.h (renamed from thirdparty/assimp/code/revision.h) | 0
-rw-r--r--  thirdparty/glslang/SPIRV/GLSL.ext.EXT.h | 1
-rw-r--r--  thirdparty/glslang/SPIRV/GLSL.ext.KHR.h | 3
-rw-r--r--  thirdparty/glslang/SPIRV/GLSL.ext.NV.h | 3
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/SPIRV/GlslangToSpv.cpp | 1681
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/SPIRV/GlslangToSpv.h | 2
-rw-r--r--  thirdparty/glslang/SPIRV/InReadableOrder.cpp | 44
-rw-r--r--  thirdparty/glslang/SPIRV/Logger.cpp | 4
-rw-r--r--  thirdparty/glslang/SPIRV/Logger.h | 9
-rw-r--r--  thirdparty/glslang/SPIRV/SPVRemapper.h | 2
-rw-r--r--  thirdparty/glslang/SPIRV/SpvBuilder.cpp | 80
-rw-r--r--  thirdparty/glslang/SPIRV/SpvBuilder.h | 40
-rw-r--r--  thirdparty/glslang/SPIRV/SpvPostProcess.cpp | 78
-rw-r--r--  thirdparty/glslang/SPIRV/SpvTools.cpp | 8
-rw-r--r--  thirdparty/glslang/SPIRV/SpvTools.h | 8
-rw-r--r--  thirdparty/glslang/SPIRV/disassemble.cpp | 43
-rw-r--r--  thirdparty/glslang/SPIRV/doc.cpp | 117
-rw-r--r--  thirdparty/glslang/SPIRV/spirv.hpp | 104
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/SPIRV/spvIR.h | 53
-rw-r--r--  thirdparty/glslang/glslang/Include/BaseTypes.h | 68
-rw-r--r--  thirdparty/glslang/glslang/Include/Common.h | 2
-rw-r--r--  thirdparty/glslang/glslang/Include/ConstantUnion.h | 228
-rw-r--r--  thirdparty/glslang/glslang/Include/PoolAlloc.h | 1
-rw-r--r--  thirdparty/glslang/glslang/Include/Types.h | 555
-rw-r--r--  thirdparty/glslang/glslang/Include/intermediate.h | 127
-rw-r--r--  thirdparty/glslang/glslang/Include/revision.h | 2
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/Constant.cpp | 198
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp | 3415
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/Initialize.h | 2
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp | 916
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp | 32
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp | 1392
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h | 27
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/Scan.cpp | 608
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp | 128
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp | 44
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h | 13
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/Versions.cpp | 217
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/Versions.h | 24
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/attribute.cpp | 5
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/attribute.h | 44
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/gl_types.h | 4
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/glslang.y | 1021
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp | 8031
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h | 767
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp | 66
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp | 1275
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/iomapper.h | 242
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/limits.cpp | 2
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp | 133
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/localintermediate.h | 625
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/parseVersions.h | 119
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp | 17
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp | 0
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp | 23
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp | 2
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp | 6
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/reflection.cpp | 42
-rw-r--r--  thirdparty/glslang/glslang/MachineIndependent/reflection.h | 20
-rw-r--r--  thirdparty/glslang/glslang/OSDependent/Web/glslang.after.js | 26
-rw-r--r--  thirdparty/glslang/glslang/OSDependent/Web/glslang.js.cpp | 269
-rw-r--r--  thirdparty/glslang/glslang/OSDependent/Web/glslang.pre.js | 45
-rwxr-xr-x[-rw-r--r--]  thirdparty/glslang/glslang/Public/ShaderLang.h | 180
-rw-r--r--  thirdparty/glslang/patches/fix-mingw-snprintf.patch | 15
-rw-r--r--  thirdparty/mbedtls/include/mbedtls/version.h | 8
-rw-r--r--  thirdparty/mbedtls/library/bignum.c | 5
-rw-r--r--  thirdparty/mbedtls/library/cipher.c | 14
-rw-r--r--  thirdparty/mbedtls/library/ecdsa.c | 4
-rw-r--r--  thirdparty/mbedtls/library/pkparse.c | 120
-rw-r--r--  thirdparty/mbedtls/library/rsa.c | 11
-rw-r--r--  thirdparty/mbedtls/library/x509write_csr.c | 4
-rw-r--r--  thirdparty/miniupnpc/miniupnpc/minissdpc.c | 4
-rw-r--r--  thirdparty/miniupnpc/miniupnpc/portlistingparse.c | 7
-rw-r--r--  thirdparty/miniupnpc/miniupnpc/upnpc.c | 4
-rw-r--r--  thirdparty/vulkan/include/vulkan/vulkan.hpp | 42851
-rw-r--r--  thirdparty/vulkan/include/vulkan/vulkan_core.h | 1684
-rw-r--r--  thirdparty/vulkan/loader/extension_manual.c | 19
-rw-r--r--  thirdparty/vulkan/loader/extension_manual.h | 12
-rw-r--r--  thirdparty/vulkan/loader/gpa_helper.h | 21
-rw-r--r--  thirdparty/vulkan/loader/loader.c | 165
-rw-r--r--  thirdparty/vulkan/loader/loader.h | 3
-rw-r--r--  thirdparty/vulkan/loader/trampoline.c | 113
-rw-r--r--  thirdparty/vulkan/loader/unknown_ext_chain_gas.S | 885
-rw-r--r--  thirdparty/vulkan/loader/unknown_ext_chain_masm.asm | 883
-rw-r--r--  thirdparty/vulkan/loader/vk_dispatch_table_helper.h | 45
-rw-r--r--  thirdparty/vulkan/loader/vk_layer_dispatch_table.h | 31
-rw-r--r--  thirdparty/vulkan/loader/vk_loader_extensions.c | 314
-rw-r--r--  thirdparty/vulkan/loader/vk_loader_extensions.h | 13
-rw-r--r--  thirdparty/vulkan/loader/vk_loader_platform.h | 31
-rw-r--r--  thirdparty/vulkan/patches/Vulkan-Loader-revert-pr260.patch | 57
-rw-r--r--  thirdparty/vulkan/vk_enum_string_helper.h | 5912
-rw-r--r--  thirdparty/vulkan/vk_mem_alloc.h | 25530
96 files changed, 50873 insertions, 53275 deletions
diff --git a/thirdparty/README.md b/thirdparty/README.md
index 934d719ca6..5c9c114ad1 100644
--- a/thirdparty/README.md
+++ b/thirdparty/README.md
@@ -9,9 +9,20 @@ Subcategories (`###` level) where needed are separated by a single empty line.
## assimp
- Upstream: http://github.com/assimp/assimp
-- Version: git (308db73d0b3c2d1870cd3e465eaa283692a4cf23)
+- Version: git (308db73d0b3c2d1870cd3e465eaa283692a4cf23, 2019)
- License: BSD-3-Clause
+Files extracted from upstream source:
+
+- Run `cmake .` in the root folder to generate files
+- `code/{CApi,Common,FBX,Material,PostProcessing}/`
+- `contrib/utf8cpp/source/`
+- `include/`
+- `revision.h`
+- `CREDITS` and `LICENSE` files
+- `rm -f code/Common/ZipArchiveIOSystem.cpp include/assimp/ZipArchiveIOSystem.h
+ include/assimp/irrXMLWrapper.h`
+
## basis_universal
@@ -151,11 +162,21 @@ the GLES version Godot targets.
## glslang
- Upstream: https://github.com/KhronosGroup/glslang
-- Version: rev.3226
+- Version: git (4fc7a33910fb8e40b970d160e1b38ab3f67fe0f3, 2020)
- License: glslang
-Important: File `glslang/glslang/Include/Common.h` has
-Godot-made change marked with `// -- GODOT --` comments.
+Version should be kept in sync with that of the Vulkan SDK in use (see the
+`vulkan` section). Check Vulkan-ValidationLayers at the matching SDK tag for the
+known good glslang commit: https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/scripts/known_good.json
+
+Files extracted from upstream source:
+
+- `glslang`, `OGLCompilersDLL`, `SPIRV`
+- `LICENSE.txt`
+- Unnecessary files like `CMakeLists.txt`, `revision.template` and
+ `updateGrammar` removed.
+
+Patches in the `patches` directory should be re-applied after updates.
## jpeg-compressor
@@ -281,7 +302,7 @@ changes are marked with `// -- GODOT --` comments.
## mbedtls
- Upstream: https://tls.mbed.org/
-- Version: 2.16.4
+- Version: 2.16.5
- License: Apache 2.0
File extracted from upstream release tarball (`-apache.tgz` variant):
@@ -291,9 +312,9 @@ File extracted from upstream release tarball (`-apache.tgz` variant):
- LICENSE and apache-2.0.txt files
- Applied the patch in `thirdparty/mbedtls/patches/1453.diff` (PR 1453).
Soon to be merged upstream. Check it out at next update.
-- Applied the patch in `thirdparty/mbedtls/patches/padlock.diff`. This disables VIA
- padlock support which defines a symbol `unsupported` which clashes with
- a symbol in libwebsockets.
+- Applied the patch in `thirdparty/mbedtls/patches/padlock.diff`. This disables
+  VIA padlock support, which defines a symbol `unsupported` that clashes with
+ a pre-defined symbol.
- Added 2 files `godot_core_mbedtls_platform.{c,h}` providing configuration
for light bundling with core.
@@ -301,7 +322,7 @@ File extracted from upstream release tarball (`-apache.tgz` variant):
## miniupnpc
- Upstream: https://github.com/miniupnp/miniupnp/tree/master/miniupnpc
-- Version: git (0ab1d67, 2019)
+- Version: git (4436632, 2020)
- License: BSD-3-Clause
Files extracted from upstream source:
@@ -531,7 +552,7 @@ folder.
## vulkan
- Upstream: https://github.com/KhronosGroup/Vulkan-Loader
-- Version: 1.1.127
+- Version: sdk-1.2.131.2
- License: Apache 2.0
Unless there is a specific reason to package a more recent version, please stick
@@ -547,10 +568,13 @@ Files extracted from upstream source:
`loader/` folder
- `LICENSE.txt`
-`vk_enum_string_helper.h` is taken from the match `Vulkan-ValidationLayers` SDK
-release: https://github.com/KhronosGroup/Vulkan-Loader/tree/master/loader/generated
+`vk_enum_string_helper.h` is taken from the matching `Vulkan-ValidationLayers`
+SDK release: https://github.com/KhronosGroup/Vulkan-ValidationLayers/blob/master/layers/generated/vk_enum_string_helper.h
`vk_mem_alloc.h` is taken from https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator
+Version: 2.3.0
+
+Patches in the `patches` directory should be re-applied after updates.
## wslay
diff --git a/thirdparty/assimp/contrib/utf8cpp/doc/ReleaseNotes b/thirdparty/assimp/contrib/utf8cpp/doc/ReleaseNotes
deleted file mode 100644
index 364411a23d..0000000000
--- a/thirdparty/assimp/contrib/utf8cpp/doc/ReleaseNotes
+++ /dev/null
@@ -1,12 +0,0 @@
-utf8 cpp library
-Release 2.3.4
-
-A minor bug fix release. Thanks to all who reported bugs.
-
-Note: Version 2.3.3 contained a regression, and therefore was removed.
-
-Changes from version 2.3.2
-- Bug fix [39]: checked.h Line 273 and unchecked.h Line 182 have an extra ';'
-- Bug fix [36]: replace_invalid() only works with back_inserter
-
-Files included in the release: utf8.h, core.h, checked.h, unchecked.h, utf8cpp.html, ReleaseNotes
diff --git a/thirdparty/assimp/contrib/utf8cpp/doc/utf8cpp.html b/thirdparty/assimp/contrib/utf8cpp/doc/utf8cpp.html
deleted file mode 100644
index 6f2aacbe7b..0000000000
--- a/thirdparty/assimp/contrib/utf8cpp/doc/utf8cpp.html
+++ /dev/null
@@ -1,1789 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
-<html>
- <head>
- <meta name="generator" content=
- "HTML Tidy for Linux/x86 (vers 1st November 2002), see www.w3.org">
- <meta name="description" content=
- "A simple, portable and lightweigt C++ library for easy handling of UTF-8 encoded strings">
- <meta name="keywords" content="UTF-8 C++ portable utf8 unicode generic templates">
- <meta name="author" content="Nemanja Trifunovic">
- <title>
- UTF8-CPP: UTF-8 with C++ in a Portable Way
- </title>
- <style type="text/css">
- <!--
- span.return_value {
- color: brown;
- }
- span.keyword {
- color: blue;
- }
- span.preprocessor {
- color: navy;
- }
- span.literal {
- color: olive;
- }
- span.comment {
- color: green;
- }
- code {
- font-weight: bold;
- }
- ul.toc {
- list-style-type: none;
- }
- p.version {
- font-size: small;
- font-style: italic;
- }
- -->
- </style>
- </head>
- <body>
- <h1>
- UTF8-CPP: UTF-8 with C++ in a Portable Way
- </h1>
- <p>
- <a href="https://sourceforge.net/projects/utfcpp">The Sourceforge project page</a>
- </p>
- <div id="toc">
- <h2>
- Table of Contents
- </h2>
- <ul class="toc">
- <li>
- <a href="#introduction">Introduction</a>
- </li>
- <li>
- <a href="#examples">Examples of Use</a>
- <ul class="toc">
- <li>
-          <a href=#introsample>Introductory Sample</a>
- </li>
- <li>
- <a href=#validfile>Checking if a file contains valid UTF-8 text</a>
- </li>
- <li>
- <a href=#fixinvalid>Ensure that a string contains valid UTF-8 text</a>
- </li>
- </ul>
- <li>
- <a href="#reference">Reference</a>
- <ul class="toc">
- <li>
- <a href="#funutf8">Functions From utf8 Namespace </a>
- </li>
- <li>
- <a href="#typesutf8">Types From utf8 Namespace </a>
- </li>
- <li>
- <a href="#fununchecked">Functions From utf8::unchecked Namespace </a>
- </li>
- <li>
- <a href="#typesunchecked">Types From utf8::unchecked Namespace </a>
- </li>
- </ul>
- </li>
- <li>
- <a href="#points">Points of Interest</a>
- </li>
- <li>
- <a href="#links">Links</a>
- </li>
- </ul>
- </div>
- <h2 id="introduction">
- Introduction
- </h2>
- <p>
- Many C++ developers miss an easy and portable way of handling Unicode encoded
- strings. The original C++ Standard (known as C++98 or C++03) is Unicode agnostic.
- C++11 provides some support for Unicode on core language and library level:
- u8, u, and U character and string literals, char16_t and char32_t character types,
- u16string and u32string library classes, and codecvt support for conversions
- between Unicode encoding forms.
- In the meantime, developers use third party libraries like ICU, OS specific capabilities, or simply
-      roll their own solutions.
- </p>
- <p>
- In order to easily handle UTF-8 encoded Unicode strings, I came up with a small
-      generic library. For anybody used to working with STL algorithms and iterators, it should be
- easy and natural to use. The code is freely available for any purpose - check out
- the license at the beginning of the utf8.h file. If you run into
- bugs or performance issues, please let me know and I'll do my best to address them.
- </p>
- <p>
- The purpose of this article is not to offer an introduction to Unicode in general,
- and UTF-8 in particular. If you are not familiar with Unicode, be sure to check out
- <a href="http://www.unicode.org/">Unicode Home Page</a> or some other source of
- information for Unicode. Also, it is not my aim to advocate the use of UTF-8
- encoded strings in C++ programs; if you want to handle UTF-8 encoded strings from
- C++, I am sure you have good reasons for it.
- </p>
- <h2 id="examples">
- Examples of use
- </h2>
- <h3 id="introsample">
-      Introductory Sample
- </h3>
- <p>
- To illustrate the use of the library, let's start with a small but complete program
- that opens a file containing UTF-8 encoded text, reads it line by line, checks each line
- for invalid UTF-8 byte sequences, and converts it to UTF-16 encoding and back to UTF-8:
- </p>
-<pre>
-<span class="preprocessor">#include &lt;fstream&gt;</span>
-<span class="preprocessor">#include &lt;iostream&gt;</span>
-<span class="preprocessor">#include &lt;string&gt;</span>
-<span class="preprocessor">#include &lt;vector&gt;</span>
-<span class="preprocessor">#include "utf8.h"</span>
-<span class="keyword">using namespace</span> std;
-<span class="keyword">int</span> main(<span class="keyword">int</span> argc, <span class="keyword">char</span>** argv)
-{
- <span class="keyword">if</span> (argc != <span class="literal">2</span>) {
- cout &lt;&lt; <span class="literal">"\nUsage: docsample filename\n"</span>;
- <span class="keyword">return</span> <span class="literal">0</span>;
- }
-
- <span class="keyword">const char</span>* test_file_path = argv[1];
- <span class="comment">// Open the test file (contains UTF-8 encoded text)</span>
- ifstream fs8(test_file_path);
- <span class="keyword">if</span> (!fs8.is_open()) {
- cout &lt;&lt; <span class=
-"literal">"Could not open "</span> &lt;&lt; test_file_path &lt;&lt; endl;
- <span class="keyword">return</span> <span class="literal">0</span>;
- }
-
- <span class="keyword">unsigned</span> line_count = <span class="literal">1</span>;
- string line;
- <span class="comment">// Play with all the lines in the file</span>
- <span class="keyword">while</span> (getline(fs8, line)) {
- <span class="comment">// check for invalid utf-8 (for a simple yes/no check, there is also utf8::is_valid function)</span>
- string::iterator end_it = utf8::find_invalid(line.begin(), line.end());
- <span class="keyword">if</span> (end_it != line.end()) {
- cout &lt;&lt; <span class=
-"literal">"Invalid UTF-8 encoding detected at line "</span> &lt;&lt; line_count &lt;&lt; <span
- class="literal">"\n"</span>;
- cout &lt;&lt; <span class=
-"literal">"This part is fine: "</span> &lt;&lt; string(line.begin(), end_it) &lt;&lt; <span
- class="literal">"\n"</span>;
- }
-
- <span class="comment">// Get the line length (at least for the valid part)</span>
- <span class="keyword">int</span> length = utf8::distance(line.begin(), end_it);
- cout &lt;&lt; <span class=
-"literal">"Length of line "</span> &lt;&lt; line_count &lt;&lt; <span class=
-"literal">" is "</span> &lt;&lt; length &lt;&lt; <span class="literal">"\n"</span>;
-
- <span class="comment">// Convert it to utf-16</span>
- vector&lt;unsigned short&gt; utf16line;
- utf8::utf8to16(line.begin(), end_it, back_inserter(utf16line));
-
- <span class="comment">// And back to utf-8</span>
- string utf8line;
- utf8::utf16to8(utf16line.begin(), utf16line.end(), back_inserter(utf8line));
-
- <span class="comment">// Confirm that the conversion went OK:</span>
- <span class="keyword">if</span> (utf8line != string(line.begin(), end_it))
- cout &lt;&lt; <span class=
-"literal">"Error in UTF-16 conversion at line: "</span> &lt;&lt; line_count &lt;&lt; <span
- class="literal">"\n"</span>;
-
- line_count++;
- }
- <span class="keyword">return</span> <span class="literal">0</span>;
-}
-</pre>
- <p>
-      In the previous code sample, for each line we detected invalid UTF-8
-      sequences with <code>find_invalid</code>; the number of characters (more
-      precisely, the number of Unicode code points, including the end of line and
-      even a BOM if there is one) in each line was
-      determined with <code>utf8::distance</code>; finally, we converted
- each line to UTF-16 encoding with <code>utf8to16</code> and back to UTF-8 with
- <code>utf16to8</code>.
- </p>
- <h3 id="validfile">Checking if a file contains valid UTF-8 text</h3>
-<p>
-Here is a function that checks whether the content of a file is valid UTF-8 encoded text without
-reading the content into memory:
-</p>
-<pre>
-<span class="keyword">bool</span> valid_utf8_file(i<span class="keyword">const char</span>* file_name)
-{
- ifstream ifs(file_name);
- <span class="keyword">if</span> (!ifs)
- <span class="keyword">return false</span>; <span class="comment">// even better, throw here</span>
-
- istreambuf_iterator&lt;<span class="keyword">char</span>&gt; it(ifs.rdbuf());
- istreambuf_iterator&lt;<span class="keyword">char</span>&gt; eos;
-
- <span class="keyword">return</span> utf8::is_valid(it, eos);
-}
-</pre>
-<p>
-Because the function <code>utf8::is_valid()</code> works with input iterators, we were able
-to pass an <code>istreambuf_iterator</code> to it and read the content of the file directly
-without loading it into memory first.</p>
-<p>
-Note that other functions that take input iterator arguments can be used in a similar way. For
-instance, to read the content of a UTF-8 encoded text file and convert the text to UTF-16, just
-do something like:
-</p>
-<pre>
- utf8::utf8to16(it, eos, back_inserter(u16string));
-</pre>
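-<p>
-As a slightly more complete sketch of the same idea (the function name is
-illustrative, and the includes from the introductory sample are assumed),
-reading a whole UTF-8 file into a UTF-16 buffer in one pass might look like:
-</p>
-<pre>
-vector&lt;unsigned short&gt; file_to_utf16(const char* file_name)
-{
-    vector&lt;unsigned short&gt; utf16result;
-    ifstream ifs(file_name);
-    if (!ifs)
-        return utf16result; // or better, report the error
-
-    istreambuf_iterator&lt;char&gt; it(ifs.rdbuf());
-    istreambuf_iterator&lt;char&gt; eos;
-    // utf8to16 reads octets straight from the stream buffer and appends
-    // UTF-16 code units; it throws utf8::invalid_utf8 on malformed input.
-    utf8::utf8to16(it, eos, back_inserter(utf16result));
-    return utf16result;
-}
-</pre>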
- <h3 id="fixinvalid">Ensure that a string contains valid UTF-8 text</h3>
-<p>
-If we have some text that "probably" contains UTF-8 encoded text and we want to
-replace any invalid UTF-8 sequence with a replacement character, something like
-the following function may be used:
-</p>
-<pre>
-<span class="keyword">void</span> fix_utf8_string(std::string&amp; str)
-{
- std::string temp;
- utf8::replace_invalid(str.begin(), str.end(), back_inserter(temp));
- str = temp;
-}
-</pre>
-<p>The function will replace any invalid UTF-8 sequence with a Unicode replacement character.
-There is an overloaded function that enables the caller to supply their own replacement character.
-</p>
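-<p>
-For instance (a sketch, reusing <code>str</code> and <code>temp</code> from above), the
-overload with an explicit marker replaces each invalid sequence with <code>'?'</code>:
-</p>
-<pre>
-utf8::replace_invalid(str.begin(), str.end(), back_inserter(temp), '?');
-</pre>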
- <h2 id="reference">
- Reference
- </h2>
- <h3 id="funutf8">
- Functions From utf8 Namespace
- </h3>
- <h4>
- utf8::append
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Encodes a 32 bit code point as a UTF-8 sequence of octets and appends the sequence
- to a UTF-8 string.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-octet_iterator append(uint32_t cp, octet_iterator result);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an output iterator.<br>
- <code>cp</code>: a 32 bit integer representing a code point to append to the
- sequence.<br>
- <code>result</code>: an output iterator to the place in the sequence where to
- append the code point.<br>
- <span class="return_value">Return value</span>: an iterator pointing to the place
- after the newly appended sequence.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">unsigned char</span> u[<span class="literal">5</span>] = {<span
-class="literal">0</span>,<span class="literal">0</span>,<span class=
-"literal">0</span>,<span class="literal">0</span>,<span class="literal">0</span>};
-<span class="keyword">unsigned char</span>* end = append(<span class=
-"literal">0x0448</span>, u);
-assert (u[<span class="literal">0</span>] == <span class=
-"literal">0xd1</span> &amp;&amp; u[<span class="literal">1</span>] == <span class=
-"literal">0x88</span> &amp;&amp; u[<span class="literal">2</span>] == <span class=
-"literal">0</span> &amp;&amp; u[<span class="literal">3</span>] == <span class=
-"literal">0</span> &amp;&amp; u[<span class="literal">4</span>] == <span class=
-"literal">0</span>);
-</pre>
- <p>
- Note that <code>append</code> does not allocate any memory - it is the burden of
- the caller to make sure there is enough memory allocated for the operation. To make
- things more interesting, <code>append</code> can add anywhere between 1 and 4
- octets to the sequence. In practice, you would most often want to use
- <code>std::back_inserter</code> to ensure that the necessary memory is allocated.
- </p>
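-    <p>
-    For example, a minimal sketch of the <code>std::back_inserter</code> approach,
-    appending code points to a <code>std::string</code> so that storage grows as
-    needed:
-    </p>
-<pre>
-std::string s;
-utf8::append(<span class="literal">0x0448</span>, std::back_inserter(s)); <span class="comment">// appends 2 octets</span>
-utf8::append(<span class="literal">0x65e5</span>, std::back_inserter(s)); <span class="comment">// appends 3 octets</span>
-assert (s.size() == <span class="literal">5</span>);
-</pre>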
- <p>
- In case of an invalid code point, a <code>utf8::invalid_code_point</code> exception
- is thrown.
- </p>
- <h4>
- utf8::next
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Given the iterator to the beginning of the UTF-8 sequence, it returns the code
- point and moves the iterator to the next position.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-uint32_t next(octet_iterator&amp; it, octet_iterator end);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
-      <code>it</code>: a reference to an iterator pointing to the beginning of a UTF-8
- encoded code point. After the function returns, it is incremented to point to the
- beginning of the next code point.<br>
- <code>end</code>: end of the UTF-8 sequence to be processed. If <code>it</code>
- gets equal to <code>end</code> during the extraction of a code point, an
- <code>utf8::not_enough_room</code> exception is thrown.<br>
- <span class="return_value">Return value</span>: the 32 bit representation of the
- processed UTF-8 code point.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* w = twochars;
-<span class="keyword">int</span> cp = next(w, twochars + <span class="literal">6</span>);
-assert (cp == <span class="literal">0x65e5</span>);
-assert (w == twochars + <span class="literal">3</span>);
-</pre>
- <p>
- This function is typically used to iterate through a UTF-8 encoded string.
- </p>
- <p>
-      In case of an invalid UTF-8 sequence, a <code>utf8::invalid_utf8</code> exception is
- thrown.
- </p>
- <h4>
- utf8::peek_next
- </h4>
- <p class="version">
- Available in version 2.1 and later.
- </p>
- <p>
- Given the iterator to the beginning of the UTF-8 sequence, it returns the code
- point for the following sequence without changing the value of the iterator.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-uint32_t peek_next(octet_iterator it, octet_iterator end);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
-      <code>it</code>: an iterator pointing to the beginning of a UTF-8
- encoded code point.<br>
- <code>end</code>: end of the UTF-8 sequence to be processed. If <code>it</code>
- gets equal to <code>end</code> during the extraction of a code point, an
- <code>utf8::not_enough_room</code> exception is thrown.<br>
- <span class="return_value">Return value</span>: the 32 bit representation of the
- processed UTF-8 code point.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* w = twochars;
-<span class="keyword">int</span> cp = peek_next(w, twochars + <span class="literal">6</span>);
-assert (cp == <span class="literal">0x65e5</span>);
-assert (w == twochars);
-</pre>
- <p>
-      In case of an invalid UTF-8 sequence, a <code>utf8::invalid_utf8</code> exception is
- thrown.
- </p>
- <h4>
- utf8::prior
- </h4>
- <p class="version">
- Available in version 1.02 and later.
- </p>
- <p>
- Given a reference to an iterator pointing to an octet in a UTF-8 sequence, it
- decreases the iterator until it hits the beginning of the previous UTF-8 encoded
- code point and returns the 32 bits representation of the code point.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-uint32_t prior(octet_iterator&amp; it, octet_iterator start);
-
-</pre>
- <p>
- <code>octet_iterator</code>: a bidirectional iterator.<br>
- <code>it</code>: a reference pointing to an octet within a UTF-8 encoded string.
- After the function returns, it is decremented to point to the beginning of the
- previous code point.<br>
- <code>start</code>: an iterator to the beginning of the sequence where the search
- for the beginning of a code point is performed. It is a
- safety measure to prevent passing the beginning of the string in the search for a
- UTF-8 lead octet.<br>
- <span class="return_value">Return value</span>: the 32 bit representation of the
- previous code point.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">unsigned char</span>* w = twochars + <span class=
-"literal">3</span>;
-<span class="keyword">int</span> cp = prior (w, twochars);
-assert (cp == <span class="literal">0x65e5</span>);
-assert (w == twochars);
-</pre>
- <p>
-      This function has two purposes: one is to iterate backwards through a UTF-8
-      encoded string. Note that it is usually a better idea to iterate forward instead,
-      since <code>utf8::next</code> is faster. The second purpose is to find the beginning
-      of a UTF-8 sequence if we have a random position within a string. Note that in that
- case <code>utf8::prior</code> may not detect an invalid UTF-8 sequence in some scenarios:
- for instance if there are superfluous trail octets, it will just skip them.
- </p>
- <p>
- <code>it</code> will typically point to the beginning of
- a code point, and <code>start</code> will point to the
- beginning of the string to ensure we don't go backwards too far. <code>it</code> is
- decreased until it points to a lead UTF-8 octet, and then the UTF-8 sequence
- beginning with that octet is decoded to a 32 bit representation and returned.
- </p>
- <p>
- In case <code>start</code> is reached before a UTF-8 lead octet is hit, or if an
- invalid UTF-8 sequence is started by the lead octet, an <code>invalid_utf8</code>
- exception is thrown.
- </p>
- <p>In case <code>start</code> equals <code>it</code>, a <code>not_enough_room</code>
-    exception is thrown.</p>
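-    <p>
-    As a sketch of that second purpose (the string and offsets here are
-    illustrative), recovering the code point that contains an arbitrary octet
-    position looks like this:
-    </p>
-<pre>
-<span class="keyword">char</span>* threechars = <span class="literal">"\xf0\x90\x8d\x86\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* pos = threechars + <span class="literal">5</span>; <span class="comment">// a trail octet of the second code point</span>
-<span class="keyword">int</span> cp = prior (pos, threechars); <span class="comment">// backs up to the lead octet, then decodes</span>
-assert (cp == <span class="literal">0x65e5</span>);
-assert (pos == threechars + <span class="literal">4</span>);
-</pre>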
- <h4>
- utf8::previous
- </h4>
- <p class="version">
- Deprecated in version 1.02 and later.
- </p>
- <p>
-      Given a reference to an iterator pointing to an octet in a UTF-8 sequence, it
- decreases the iterator until it hits the beginning of the previous UTF-8 encoded
- code point and returns the 32 bits representation of the code point.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-uint32_t previous(octet_iterator&amp; it, octet_iterator pass_start);
-
-</pre>
- <p>
- <code>octet_iterator</code>: a random access iterator.<br>
- <code>it</code>: a reference pointing to an octet within a UTF-8 encoded string.
- After the function returns, it is decremented to point to the beginning of the
- previous code point.<br>
- <code>pass_start</code>: an iterator to the point in the sequence where the search
- for the beginning of a code point is aborted if no result was reached. It is a
- safety measure to prevent passing the beginning of the string in the search for a
- UTF-8 lead octet.<br>
- <span class="return_value">Return value</span>: the 32 bit representation of the
- previous code point.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">unsigned char</span>* w = twochars + <span class=
-"literal">3</span>;
-<span class="keyword">int</span> cp = previous (w, twochars - <span class=
-"literal">1</span>);
-assert (cp == <span class="literal">0x65e5</span>);
-assert (w == twochars);
-</pre>
- <p>
- <code>utf8::previous</code> is deprecated, and <code>utf8::prior</code> should
- be used instead, although the existing code can continue using this function.
- The problem is the parameter <code>pass_start</code> that points to the position
- just before the beginning of the sequence. Standard containers don't have the
- concept of "pass start" and the function can not be used with their iterators.
- </p>
- <p>
- <code>it</code> will typically point to the beginning of
- a code point, and <code>pass_start</code> will point to the octet just before the
- beginning of the string to ensure we don't go backwards too far. <code>it</code> is
- decreased until it points to a lead UTF-8 octet, and then the UTF-8 sequence
- beginning with that octet is decoded to a 32 bit representation and returned.
- </p>
- <p>
- In case <code>pass_start</code> is reached before a UTF-8 lead octet is hit, or if an
- invalid UTF-8 sequence is started by the lead octet, an <code>invalid_utf8</code>
-      exception is thrown.
- </p>
- <h4>
- utf8::advance
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
-      Advances an iterator by the specified number of code points within a UTF-8
- sequence.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator, typename distance_type&gt;
-<span class=
-"keyword">void</span> advance (octet_iterator&amp; it, distance_type n, octet_iterator end);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
- <code>distance_type</code>: an integral type convertible to <code>octet_iterator</code>'s difference type.<br>
-      <code>it</code>: a reference to an iterator pointing to the beginning of a UTF-8
- encoded code point. After the function returns, it is incremented to point to the
- nth following code point.<br>
- <code>n</code>: a positive integer that shows how many code points we want to
- advance.<br>
- <code>end</code>: end of the UTF-8 sequence to be processed. If <code>it</code>
- gets equal to <code>end</code> during the extraction of a code point, an
- <code>utf8::not_enough_room</code> exception is thrown.<br>
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">unsigned char</span>* w = twochars;
-advance (w, <span class="literal">2</span>, twochars + <span class="literal">6</span>);
-assert (w == twochars + <span class="literal">5</span>);
-</pre>
- <p>
- This function works only "forward". In case of a negative <code>n</code>, there is
- no effect.
- </p>
- <p>
- In case of an invalid code point, a <code>utf8::invalid_code_point</code> exception
- is thrown.
- </p>
- <h4>
- utf8::distance
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
-      Given the iterators to two UTF-8 encoded code points in a sequence, returns the
- number of code points between them.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-<span class=
-"keyword">typename</span> std::iterator_traits&lt;octet_iterator&gt;::difference_type distance (octet_iterator first, octet_iterator last);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
-      <code>first</code>: an iterator to the beginning of a UTF-8 encoded code point.<br>
-      <code>last</code>: an iterator just past the last UTF-8 encoded code
-      point in the sequence whose length we are trying to determine. It can be the
-      beginning of a new code point, or not.<br>
-      <span class="return_value">Return value</span>: the distance between the iterators,
- in code points.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-size_t dist = utf8::distance(twochars, twochars + <span class="literal">5</span>);
-assert (dist == <span class="literal">2</span>);
-</pre>
- <p>
- This function is used to find the length (in code points) of a UTF-8 encoded
- string. The reason it is called <em>distance</em>, rather than, say,
-      <em>length</em> is mainly because developers expect <em>length</em> to be an
-      O(1) function. Computing the length of a UTF-8 string is a linear operation, and
-      it looked better to model it after the <code>std::distance</code> algorithm.
- </p>
- <p>
-      In case of an invalid UTF-8 sequence, a <code>utf8::invalid_utf8</code> exception is
-      thrown. If <code>last</code> does not point past the end of a UTF-8 sequence,
- a <code>utf8::not_enough_room</code> exception is thrown.
- </p>
- <h4>
- utf8::utf16to8
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Converts a UTF-16 encoded string to UTF-8.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> u16bit_iterator, <span class=
-"keyword">typename</span> octet_iterator&gt;
-octet_iterator utf16to8 (u16bit_iterator start, u16bit_iterator end, octet_iterator result);
-
-</pre>
- <p>
- <code>u16bit_iterator</code>: an input iterator.<br>
- <code>octet_iterator</code>: an output iterator.<br>
- <code>start</code>: an iterator pointing to the beginning of the UTF-16 encoded
- string to convert.<br>
-      <code>end</code>: an iterator pointing one past the end of the UTF-16 encoded
- string to convert.<br>
- <code>result</code>: an output iterator to the place in the UTF-8 string where to
- append the result of conversion.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the appended UTF-8 string.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">unsigned short</span> utf16string[] = {<span class=
-"literal">0x41</span>, <span class="literal">0x0448</span>, <span class=
-"literal">0x65e5</span>, <span class="literal">0xd834</span>, <span class=
-"literal">0xdd1e</span>};
-vector&lt;<span class="keyword">unsigned char</span>&gt; utf8result;
-utf16to8(utf16string, utf16string + <span class=
-"literal">5</span>, back_inserter(utf8result));
-assert (utf8result.size() == <span class="literal">10</span>);
-</pre>
- <p>
- In case of invalid UTF-16 sequence, a <code>utf8::invalid_utf16</code> exception is
- thrown.
- </p>
- <h4>
- utf8::utf8to16
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
-      Converts a UTF-8 encoded string to UTF-16.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> u16bit_iterator, typename octet_iterator&gt;
-u16bit_iterator utf8to16 (octet_iterator start, octet_iterator end, u16bit_iterator result);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
- <code>u16bit_iterator</code>: an output iterator.<br>
-      <code>start</code>: an iterator pointing to the beginning of the UTF-8 encoded
-      string to convert.<br>
-      <code>end</code>: an iterator pointing one past the end of the UTF-8 encoded
-      string to convert.<br>
- <code>result</code>: an output iterator to the place in the UTF-16 string where to
- append the result of conversion.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the appended UTF-16 string.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span> utf8_with_surrogates[] = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88\xf0\x9d\x84\x9e"</span>;
-vector &lt;<span class="keyword">unsigned short</span>&gt; utf16result;
-utf8to16(utf8_with_surrogates, utf8_with_surrogates + <span class=
-"literal">9</span>, back_inserter(utf16result));
-assert (utf16result.size() == <span class="literal">4</span>);
-assert (utf16result[<span class="literal">2</span>] == <span class=
-"literal">0xd834</span>);
-assert (utf16result[<span class="literal">3</span>] == <span class=
-"literal">0xdd1e</span>);
-</pre>
- <p>
-      In case of an invalid UTF-8 sequence, a <code>utf8::invalid_utf8</code> exception is
-      thrown. If <code>end</code> does not point past the end of a UTF-8 sequence, a
- <code>utf8::not_enough_room</code> exception is thrown.
- </p>
- <h4>
- utf8::utf32to8
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Converts a UTF-32 encoded string to UTF-8.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator, typename u32bit_iterator&gt;
-octet_iterator utf32to8 (u32bit_iterator start, u32bit_iterator end, octet_iterator result);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an output iterator.<br>
- <code>u32bit_iterator</code>: an input iterator.<br>
- <code>start</code>: an iterator pointing to the beginning of the UTF-32 encoded
- string to convert.<br>
-      <code>end</code>: an iterator pointing one past the end of the UTF-32 encoded
- string to convert.<br>
- <code>result</code>: an output iterator to the place in the UTF-8 string where to
- append the result of conversion.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the appended UTF-8 string.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">int</span> utf32string[] = {<span class=
-"literal">0x448</span>, <span class="literal">0x65E5</span>, <span class=
-"literal">0x10346</span>, <span class="literal">0</span>};
-vector&lt;<span class="keyword">unsigned char</span>&gt; utf8result;
-utf32to8(utf32string, utf32string + <span class=
-"literal">3</span>, back_inserter(utf8result));
-assert (utf8result.size() == <span class="literal">9</span>);
-</pre>
- <p>
- In case of invalid UTF-32 string, a <code>utf8::invalid_code_point</code> exception
- is thrown.
- </p>
- <h4>
- utf8::utf8to32
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Converts a UTF-8 encoded string to UTF-32.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator, <span class=
-"keyword">typename</span> u32bit_iterator&gt;
-u32bit_iterator utf8to32 (octet_iterator start, octet_iterator end, u32bit_iterator result);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
- <code>u32bit_iterator</code>: an output iterator.<br>
- <code>start</code>: an iterator pointing to the beginning of the UTF-8 encoded
- string to convert.<br>
-      <code>end</code>: an iterator pointing one past the end of the UTF-8 encoded string
- to convert.<br>
- <code>result</code>: an output iterator to the place in the UTF-32 string where to
- append the result of conversion.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the appended UTF-32 string.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-vector&lt;<span class="keyword">int</span>&gt; utf32result;
-utf8to32(twochars, twochars + <span class=
-"literal">5</span>, back_inserter(utf32result));
-assert (utf32result.size() == <span class="literal">2</span>);
-</pre>
- <p>
-      In case of an invalid UTF-8 sequence, a <code>utf8::invalid_utf8</code> exception is
-      thrown. If <code>end</code> does not point past the end of a UTF-8 sequence, a
- <code>utf8::not_enough_room</code> exception is thrown.
- </p>
- <h4>
- utf8::find_invalid
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Detects an invalid sequence within a UTF-8 string.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-octet_iterator find_invalid(octet_iterator start, octet_iterator end);
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
- <code>start</code>: an iterator pointing to the beginning of the UTF-8 string to
- test for validity.<br>
-      <code>end</code>: an iterator pointing one past the end of the UTF-8 string to test
- for validity.<br>
- <span class="return_value">Return value</span>: an iterator pointing to the first
-      invalid octet in the UTF-8 string. If none is found, it equals
- <code>end</code>.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span> utf_invalid[] = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88\xfa"</span>;
-<span class=
-"keyword">char</span>* invalid = find_invalid(utf_invalid, utf_invalid + <span class=
-"literal">6</span>);
-assert (invalid == utf_invalid + <span class="literal">5</span>);
-</pre>
- <p>
- This function is typically used to make sure a UTF-8 string is valid before
-      processing it with other functions. It is especially important to call it before
- doing any of the <em>unchecked</em> operations on it.
- </p>
- <h4>
- utf8::is_valid
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Checks whether a sequence of octets is a valid UTF-8 string.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-<span class="keyword">bool</span> is_valid(octet_iterator start, octet_iterator end);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
- <code>start</code>: an iterator pointing to the beginning of the UTF-8 string to
- test for validity.<br>
-      <code>end</code>: an iterator pointing one past the end of the UTF-8 string to test
- for validity.<br>
- <span class="return_value">Return value</span>: <code>true</code> if the sequence
- is a valid UTF-8 string; <code>false</code> if not.
- </p>
-    <p>
-    Example of use:
-    </p>
-<pre>
-<span class="keyword">char</span> utf_invalid[] = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88\xfa"</span>;
-<span class="keyword">bool</span> bvalid = is_valid(utf_invalid, utf_invalid + <span
-class="literal">6</span>);
-assert (bvalid == false);
-</pre>
- <p>
- <code>is_valid</code> is a shorthand for <code>find_invalid(start, end) ==
-      end;</code>. You may want to use it to make sure that a byte sequence is a valid
- UTF-8 string without the need to know where it fails if it is not valid.
- </p>
- <h4>
- utf8::replace_invalid
- </h4>
- <p class="version">
- Available in version 2.0 and later.
- </p>
- <p>
- Replaces all invalid UTF-8 sequences within a string with a replacement marker.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator, <span class=
-"keyword">typename</span> output_iterator&gt;
-output_iterator replace_invalid(octet_iterator start, octet_iterator end, output_iterator out, uint32_t replacement);
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator, <span class=
-"keyword">typename</span> output_iterator&gt;
-output_iterator replace_invalid(octet_iterator start, octet_iterator end, output_iterator out);
-
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
- <code>output_iterator</code>: an output iterator.<br>
- <code>start</code>: an iterator pointing to the beginning of the UTF-8 string to
- look for invalid UTF-8 sequences.<br>
-      <code>end</code>: an iterator pointing one past the end of the UTF-8 string to look
- for invalid UTF-8 sequences.<br>
- <code>out</code>: An output iterator to the range where the result of replacement
- is stored.<br>
- <code>replacement</code>: A Unicode code point for the replacement marker. The
-      version without this parameter assumes the value <code>0xfffd</code>.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the UTF-8 string with replaced invalid sequences.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span> invalid_sequence[] = <span class=
-"literal">"a\x80\xe0\xa0\xc0\xaf\xed\xa0\x80z"</span>;
-vector&lt;<span class="keyword">char</span>&gt; replace_invalid_result;
-replace_invalid (invalid_sequence, invalid_sequence + sizeof(invalid_sequence), back_inserter(replace_invalid_result), <span
- class="literal">'?'</span>);
-bvalid = is_valid(replace_invalid_result.begin(), replace_invalid_result.end());
-assert (bvalid);
-<span class="keyword">char</span>* fixed_invalid_sequence = <span class=
-"literal">"a????z"</span>;
-assert (std::equal(replace_invalid_result.begin(), replace_invalid_result.end(), fixed_invalid_sequence));
-</pre>
- <p>
- <code>replace_invalid</code> does not perform in-place replacement of invalid
- sequences. Rather, it produces a copy of the original string with the invalid
- sequences replaced with a replacement marker. Therefore, <code>out</code> must not
- be in the <code>[start, end]</code> range.
- </p>
- <p>
-      If <code>end</code> does not point past the end of a UTF-8 sequence, a
- <code>utf8::not_enough_room</code> exception is thrown.
- </p>
- <h4>
- utf8::starts_with_bom
- </h4>
- <p class="version">
-      Available in version 2.3 and later. Replaces the deprecated <code>is_bom()</code> function.
- </p>
- <p>
- Checks whether an octet sequence starts with a UTF-8 byte order mark (BOM)
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-<span class="keyword">bool</span> starts_with_bom (octet_iterator it, octet_iterator end);
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
- <code>it</code>: beginning of the octet sequence to check<br>
-      <code>end</code>: one past the end of the sequence to check<br>
- <span class="return_value">Return value</span>: <code>true</code> if the sequence
- starts with a UTF-8 byte order mark; <code>false</code> if not.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">unsigned char</span> byte_order_mark[] = {<span class=
-"literal">0xef</span>, <span class="literal">0xbb</span>, <span class=
-"literal">0xbf</span>};
-<span class="keyword">bool</span> bbom = starts_with_bom(byte_order_mark, byte_order_mark + <span class="keyword">sizeof</span>(byte_order_mark));
-assert (bbom == <span class="literal">true</span>);
-</pre>
- <p>
- The typical use of this function is to check the first three bytes of a file. If
- they form the UTF-8 BOM, we want to skip them before processing the actual UTF-8
- encoded text.
- </p>
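-    <p>
-    A sketch of that use (<code>buf</code> and <code>buf_end</code> are assumed to
-    delimit the bytes already read from the file):
-    </p>
-<pre>
-<span class="keyword">char</span>* text = buf;
-<span class="keyword">if</span> (utf8::starts_with_bom(text, buf_end))
-    text += <span class="literal">3</span>; <span class="comment">// skip the 3-octet UTF-8 BOM before decoding</span>
-</pre>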
- <h4>
- utf8::is_bom
- </h4>
- <p class="version">
- Available in version 1.0 and later. Deprecated in version 2.3. <code>starts_with_bom()</code> should be used
- instead.
- </p>
- <p>
- Checks whether a sequence of three octets is a UTF-8 byte order mark (BOM)
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-<span class="keyword">bool</span> is_bom (octet_iterator it); <span class="comment"> // Deprecated</span>
-</pre>
- <p>
- <code>octet_iterator</code>: an input iterator.<br>
- <code>it</code>: beginning of the 3-octet sequence to check<br>
- <span class="return_value">Return value</span>: <code>true</code> if the sequence
- is UTF-8 byte order mark; <code>false</code> if not.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">unsigned char</span> byte_order_mark[] = {<span class=
-"literal">0xef</span>, <span class="literal">0xbb</span>, <span class=
-"literal">0xbf</span>};
-<span class="keyword">bool</span> bbom = is_bom(byte_order_mark);
-assert (bbom == <span class="literal">true</span>);
-</pre>
- <p>
- The typical use of this function is to check the first three bytes of a file. If
- they form the UTF-8 BOM, we want to skip them before processing the actual UTF-8
- encoded text.
- </p>
- <p>
-      If a sequence is shorter than three bytes, an invalid iterator will be
-      dereferenced. Therefore, this function is deprecated in favor of
-      <code>starts_with_bom()</code>, which takes the end of the sequence as an argument.
- </p>
- <h3 id="typesutf8">
- Types From utf8 Namespace
- </h3>
- <h4>utf8::exception
- </h4>
- <p class="version">
- Available in version 2.3 and later.
- </p>
- <p>
- Base class for the exceptions thrown by UTF CPP library functions.
- </p>
-<pre>
-<span class="keyword">class</span> exception : <span class="keyword">public</span> std::exception {};
-</pre>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">try</span> {
- code_that_uses_utf_cpp_library();
-}
-<span class="keyword">catch</span>(<span class="keyword">const</span> utf8::exception&amp; utfcpp_ex) {
- cerr &lt;&lt; utfcpp_ex.what();
-}
-</pre>
-
- <h4>utf8::invalid_code_point
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
-     Thrown by UTF8 CPP functions such as <code>advance</code> and <code>next</code> if a UTF-8 sequence represents an invalid code point.
- </p>
-
-<pre>
-<span class="keyword">class</span> invalid_code_point : <span class="keyword">public</span> exception {
-<span class="keyword">public</span>:
- uint32_t code_point() <span class="keyword">const</span>;
-};
-
-</pre>
- <p>
- Member function <code>code_point()</code> can be used to determine the invalid code point that
- caused the exception to be thrown.
- </p>
- <h4>utf8::invalid_utf8
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Thrown by UTF8 CPP functions such as <code>next</code> and <code>prior</code> if an invalid UTF-8 sequence
- is detected during decoding.
- </p>
-
-<pre>
-<span class="keyword">class</span> invalid_utf8 : <span class="keyword">public</span> exception {
-<span class="keyword">public</span>:
- uint8_t utf8_octet() <span class="keyword">const</span>;
-};
-</pre>
-
- <p>
- Member function <code>utf8_octet()</code> can be used to determine the beginning of the byte
- sequence that caused the exception to be thrown.
- </p>
- <h4>utf8::invalid_utf16
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Thrown by UTF8 CPP function <code>utf16to8</code> if an invalid UTF-16 sequence
- is detected during decoding.
- </p>
-
-<pre>
-<span class="keyword">class</span> invalid_utf16 : <span class="keyword">public</span> exception {
-<span class="keyword">public</span>:
- uint16_t utf16_word() <span class="keyword">const</span>;
-};
-</pre>
-
- <p>
- Member function <code>utf16_word()</code> can be used to determine the UTF-16 code unit
- that caused the exception to be thrown.
- </p>
- <h4>utf8::not_enough_room
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Thrown by UTF8 CPP functions such as <code>next</code> if the end of the decoded UTF-8 sequence
- was reached before the code point was decoded.
- </p>
-
-<pre>
-<span class="keyword">class</span> not_enough_room : <span class="keyword">public</span> exception {};
-</pre>
- <h4>
- utf8::iterator
- </h4>
- <p class="version">
- Available in version 2.0 and later.
- </p>
- <p>
- Adapts the underlying octet iterator to iterate over the sequence of code points,
- rather than raw octets.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class="keyword">typename</span> octet_iterator&gt;
-<span class="keyword">class</span> iterator;
-</pre>
-
- <h5>Member functions</h5>
- <dl>
-      <dt><code>iterator();</code> <dd> the default constructor; the underlying <code>octet_iterator</code> is
- constructed with its default constructor.
- <dt><code><span class="keyword">explicit</span> iterator (const octet_iterator&amp; octet_it,
- const octet_iterator&amp; range_start,
- const octet_iterator&amp; range_end);</code> <dd> a constructor
- that initializes the underlying <code>octet_iterator</code> with <code>octet_it</code>
- and sets the range in which the iterator is considered valid.
- <dt><code>octet_iterator base () <span class="keyword">const</span>;</code> <dd> returns the
- underlying <code>octet_iterator</code>.
- <dt><code>uint32_t operator * () <span class="keyword">const</span>;</code> <dd> decodes the utf-8 sequence
- the underlying <code>octet_iterator</code> is pointing to and returns the code point.
- <dt><code><span class="keyword">bool operator</span> == (const iterator&amp; rhs)
- <span class="keyword">const</span>;</code> <dd> returns <span class="keyword">true</span>
-      if the two underlying iterators are equal.
- <dt><code><span class="keyword">bool operator</span> != (const iterator&amp; rhs)
- <span class="keyword">const</span>;</code> <dd> returns <span class="keyword">true</span>
-      if the two underlying iterators are not equal.
- <dt><code>iterator&amp; <span class="keyword">operator</span> ++ (); </code> <dd> the prefix increment - moves
- the iterator to the next UTF-8 encoded code point.
- <dt><code>iterator <span class="keyword">operator</span> ++ (<span class="keyword">int</span>); </code> <dd>
- the postfix increment - moves the iterator to the next UTF-8 encoded code point and returns the current one.
- <dt><code>iterator&amp; <span class="keyword">operator</span> -- (); </code> <dd> the prefix decrement - moves
- the iterator to the previous UTF-8 encoded code point.
- <dt><code>iterator <span class="keyword">operator</span> -- (<span class="keyword">int</span>); </code> <dd>
- the postfix decrement - moves the iterator to the previous UTF-8 encoded code point and returns the current one.
- </dl>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* threechars = <span class="literal">"\xf0\x90\x8d\x86\xe6\x97\xa5\xd1\x88"</span>;
-utf8::iterator&lt;<span class="keyword">char</span>*&gt; it(threechars, threechars, threechars + <span class="literal">9</span>);
-utf8::iterator&lt;<span class="keyword">char</span>*&gt; it2 = it;
-assert (it2 == it);
-assert (*it == <span class="literal">0x10346</span>);
-assert (*(++it) == <span class="literal">0x65e5</span>);
-assert ((*it++) == <span class="literal">0x65e5</span>);
-assert (*it == <span class="literal">0x0448</span>);
-assert (it != it2);
-utf8::iterator&lt;<span class="keyword">char</span>*&gt; endit (threechars + <span class="literal">9</span>, threechars, threechars + <span class="literal">9</span>);
-assert (++it == endit);
-assert (*(--it) == <span class="literal">0x0448</span>);
-assert ((*it--) == <span class="literal">0x0448</span>);
-assert (*it == <span class="literal">0x65e5</span>);
-assert (--it == utf8::iterator&lt;<span class="keyword">char</span>*&gt;(threechars, threechars, threechars + <span class="literal">9</span>));
-assert (*it == <span class="literal">0x10346</span>);
-</pre>
- <p>
-      The purpose of the <code>utf8::iterator</code> adapter is to enable easy iteration as well as the use of STL
- algorithms with UTF-8 encoded strings. Increment and decrement operators are implemented in terms of
- <code>utf8::next()</code> and <code>utf8::prior()</code> functions.
- </p>
- <p>
-      Note that the <code>utf8::iterator</code> adapter is a checked iterator. It operates on the range specified in
-      the constructor; any attempt to go out of that range will result in an exception. Even the comparison operators
-      require both iterator objects to be constructed against the same range - otherwise an exception is thrown. Typically,
-      the range will be determined by the sequence container functions <code>begin</code> and <code>end</code>, e.g.:
- </p>
-<pre>
-std::string s = <span class="literal">"example"</span>;
-utf8::iterator&lt;std::string::iterator&gt; i (s.begin(), s.begin(), s.end());
-</pre>
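-          <p>
-            A minimal sketch of the checked behavior (in the current implementation, moving past
-            the given range throws <code>utf8::not_enough_room</code>, which derives from
-            <code>std::exception</code>):
-          </p>
-<pre>
-std::string s = <span class="literal">"example"</span>;
-utf8::iterator&lt;std::string::iterator&gt; it (s.end(), s.begin(), s.end());
-<span class="keyword">try</span> {
-    ++it;                          // already at the end of the range
-}
-<span class="keyword">catch</span> (<span class="keyword">const</span> std::exception&amp;) {
-    // reached only through the exception path
-}
-</pre>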
- <h3 id="fununchecked">
- Functions From utf8::unchecked Namespace
- </h3>
- <h4>
- utf8::unchecked::append
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Encodes a 32 bit code point as a UTF-8 sequence of octets and appends the sequence
- to a UTF-8 string.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-octet_iterator append(uint32_t cp, octet_iterator result);
-
-</pre>
- <p>
- <code>cp</code>: A 32 bit integer representing a code point to append to the
- sequence.<br>
- <code>result</code>: An output iterator to the place in the sequence where to
- append the code point.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the newly appended sequence.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">unsigned char</span> u[<span class="literal">5</span>] = {<span
-class="literal">0</span>,<span class="literal">0</span>,<span class=
-"literal">0</span>,<span class="literal">0</span>,<span class="literal">0</span>};
-<span class="keyword">unsigned char</span>* end = unchecked::append(<span class=
-"literal">0x0448</span>, u);
-assert (u[<span class="literal">0</span>] == <span class=
-"literal">0xd1</span> &amp;&amp; u[<span class="literal">1</span>] == <span class=
-"literal">0x88</span> &amp;&amp; u[<span class="literal">2</span>] == <span class=
-"literal">0</span> &amp;&amp; u[<span class="literal">3</span>] == <span class=
-"literal">0</span> &amp;&amp; u[<span class="literal">4</span>] == <span class=
-"literal">0</span>);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::append</code>. It does not
- check for validity of the supplied code point, and may produce an invalid UTF-8
- sequence.
- </p>
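-          <p>
-            To illustrate the difference, a small sketch: the checked <code>utf8::append</code>
-            rejects an invalid code point (here a lone surrogate) by throwing
-            <code>utf8::invalid_code_point</code>, while <code>unchecked::append</code> would
-            silently encode it:
-          </p>
-<pre>
-<span class="keyword">unsigned char</span> u[<span class="literal">5</span>] = {<span class="literal">0</span>,<span class="literal">0</span>,<span class="literal">0</span>,<span class="literal">0</span>,<span class="literal">0</span>};
-<span class="keyword">try</span> {
-    utf8::append (<span class="literal">0xd800</span>, u);   // throws; u stays untouched
-}
-<span class="keyword">catch</span> (<span class="keyword">const</span> utf8::invalid_code_point&amp;) {
-}
-</pre>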
- <h4>
- utf8::unchecked::next
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Given the iterator to the beginning of a UTF-8 sequence, it returns the code point
- and moves the iterator to the next position.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-uint32_t next(octet_iterator&amp; it);
-
-</pre>
- <p>
-            <code>it</code>: a reference to an iterator pointing to the beginning of a UTF-8
- encoded code point. After the function returns, it is incremented to point to the
- beginning of the next code point.<br>
- <span class="return_value">Return value</span>: the 32 bit representation of the
- processed UTF-8 code point.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* w = twochars;
-<span class="keyword">int</span> cp = unchecked::next(w);
-assert (cp == <span class="literal">0x65e5</span>);
-assert (w == twochars + <span class="literal">3</span>);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::next</code>. It does not
- check for validity of the supplied UTF-8 sequence.
- </p>
- <h4>
- utf8::unchecked::peek_next
- </h4>
- <p class="version">
- Available in version 2.1 and later.
- </p>
- <p>
-            Given the iterator to the beginning of a UTF-8 sequence, it returns the code point without moving the iterator.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-uint32_t peek_next(octet_iterator it);
-
-</pre>
- <p>
-            <code>it</code>: an iterator pointing to the beginning of a UTF-8
- encoded code point.<br>
- <span class="return_value">Return value</span>: the 32 bit representation of the
- processed UTF-8 code point.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* w = twochars;
-<span class="keyword">int</span> cp = unchecked::peek_next(w);
-assert (cp == <span class="literal">0x65e5</span>);
-assert (w == twochars);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::peek_next</code>. It does not
- check for validity of the supplied UTF-8 sequence.
- </p>
- <h4>
- utf8::unchecked::prior
- </h4>
- <p class="version">
- Available in version 1.02 and later.
- </p>
- <p>
-            Given a reference to an iterator pointing to an octet in a UTF-8 sequence, it
-            decreases the iterator until it hits the beginning of the previous UTF-8 encoded
-            code point and returns the 32 bit representation of the code point.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-uint32_t prior(octet_iterator&amp; it);
-
-</pre>
- <p>
-            <code>it</code>: a reference to an iterator pointing to an octet within a UTF-8 encoded string.
- After the function returns, it is decremented to point to the beginning of the
- previous code point.<br>
- <span class="return_value">Return value</span>: the 32 bit representation of the
- previous code point.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* w = twochars + <span class="literal">3</span>;
-<span class="keyword">int</span> cp = unchecked::prior (w);
-assert (cp == <span class="literal">0x65e5</span>);
-assert (w == twochars);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::prior</code>. It does not
- check for validity of the supplied UTF-8 sequence and offers no boundary checking.
- </p>
- <h4>
- utf8::unchecked::previous (deprecated, see utf8::unchecked::prior)
- </h4>
- <p class="version">
- Deprecated in version 1.02 and later.
- </p>
- <p>
-            Given a reference to an iterator pointing to an octet in a UTF-8 sequence, it
-            decreases the iterator until it hits the beginning of the previous UTF-8 encoded
-            code point and returns the 32 bit representation of the code point.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-uint32_t previous(octet_iterator&amp; it);
-
-</pre>
- <p>
-            <code>it</code>: a reference to an iterator pointing to an octet within a UTF-8 encoded string.
- After the function returns, it is decremented to point to the beginning of the
- previous code point.<br>
- <span class="return_value">Return value</span>: the 32 bit representation of the
- previous code point.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* w = twochars + <span class="literal">3</span>;
-<span class="keyword">int</span> cp = unchecked::previous (w);
-assert (cp == <span class="literal">0x65e5</span>);
-assert (w == twochars);
-</pre>
- <p>
-            The only reason this function is deprecated is consistency with the "checked"
-            versions, where <code>prior</code> should be used instead of <code>previous</code>.
-            In fact, <code>unchecked::previous</code> behaves exactly the same as
-            <code>unchecked::prior</code>.
- </p>
- <p>
- This is a faster but less safe version of <code>utf8::previous</code>. It does not
- check for validity of the supplied UTF-8 sequence and offers no boundary checking.
- </p>
- <h4>
- utf8::unchecked::advance
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
-            Advances an iterator by the specified number of code points within a UTF-8
- sequence.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator, typename distance_type&gt;
-<span class="keyword">void</span> advance (octet_iterator&amp; it, distance_type n);
-
-</pre>
- <p>
-            <code>it</code>: a reference to an iterator pointing to the beginning of a UTF-8
- encoded code point. After the function returns, it is incremented to point to the
- nth following code point.<br>
- <code>n</code>: a positive integer that shows how many code points we want to
- advance.<br>
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* w = twochars;
-unchecked::advance (w, <span class="literal">2</span>);
-assert (w == twochars + <span class="literal">5</span>);
-</pre>
- <p>
-            This function works only "forward": a negative <code>n</code> has
-            no effect.
- </p>
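-          <p>
-            For illustration, a short sketch of the documented behavior:
-          </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class="literal">"\xe6\x97\xa5\xd1\x88"</span>;
-<span class="keyword">char</span>* w = twochars + <span class="literal">3</span>;
-unchecked::advance (w, -<span class="literal">1</span>);
-assert (w == twochars + <span class="literal">3</span>);   // a negative n leaves w untouched
-</pre>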
- <p>
- This is a faster but less safe version of <code>utf8::advance</code>. It does not
- check for validity of the supplied UTF-8 sequence and offers no boundary checking.
- </p>
- <h4>
- utf8::unchecked::distance
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
-            Given the iterators to two UTF-8 encoded code points in a sequence, returns the
- number of code points between them.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator&gt;
-<span class=
-"keyword">typename</span> std::iterator_traits&lt;octet_iterator&gt;::difference_type distance (octet_iterator first, octet_iterator last);
-</pre>
- <p>
-            <code>first</code>: an iterator to the beginning of a UTF-8 encoded code point.<br>
-            <code>last</code>: an iterator to a "post-end" of the last UTF-8 encoded code
-            point in the sequence whose length we are trying to determine. It may or may not be
-            the beginning of a new code point.<br>
-            <span class="return_value">Return value</span>: the distance between the iterators,
-            in code points.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-size_t dist = utf8::unchecked::distance(twochars, twochars + <span class=
-"literal">5</span>);
-assert (dist == <span class="literal">2</span>);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::distance</code>. It does not
- check for validity of the supplied UTF-8 sequence.
- </p>
- <h4>
- utf8::unchecked::utf16to8
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Converts a UTF-16 encoded string to UTF-8.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> u16bit_iterator, <span class=
-"keyword">typename</span> octet_iterator&gt;
-octet_iterator utf16to8 (u16bit_iterator start, u16bit_iterator end, octet_iterator result);
-
-</pre>
- <p>
- <code>start</code>: an iterator pointing to the beginning of the UTF-16 encoded
- string to convert.<br>
-            <code>end</code>: an iterator pointing to past-the-end of the UTF-16 encoded
- string to convert.<br>
- <code>result</code>: an output iterator to the place in the UTF-8 string where to
- append the result of conversion.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the appended UTF-8 string.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">unsigned short</span> utf16string[] = {<span class=
-"literal">0x41</span>, <span class="literal">0x0448</span>, <span class=
-"literal">0x65e5</span>, <span class="literal">0xd834</span>, <span class=
-"literal">0xdd1e</span>};
-vector&lt;<span class="keyword">unsigned char</span>&gt; utf8result;
-unchecked::utf16to8(utf16string, utf16string + <span class=
-"literal">5</span>, back_inserter(utf8result));
-assert (utf8result.size() == <span class="literal">10</span>);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::utf16to8</code>. It does not
- check for validity of the supplied UTF-16 sequence.
- </p>
- <h4>
- utf8::unchecked::utf8to16
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
-            Converts a UTF-8 encoded string to UTF-16.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> u16bit_iterator, typename octet_iterator&gt;
-u16bit_iterator utf8to16 (octet_iterator start, octet_iterator end, u16bit_iterator result);
-
-</pre>
- <p>
- <code>start</code>: an iterator pointing to the beginning of the UTF-8 encoded
-            string to convert.<br>
-            <code>end</code>: an iterator pointing to past-the-end of the UTF-8 encoded string to convert.<br>
- <code>result</code>: an output iterator to the place in the UTF-16 string where to
- append the result of conversion.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the appended UTF-16 string.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span> utf8_with_surrogates[] = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88\xf0\x9d\x84\x9e"</span>;
-vector &lt;<span class="keyword">unsigned short</span>&gt; utf16result;
-unchecked::utf8to16(utf8_with_surrogates, utf8_with_surrogates + <span class=
-"literal">9</span>, back_inserter(utf16result));
-assert (utf16result.size() == <span class="literal">4</span>);
-assert (utf16result[<span class="literal">2</span>] == <span class=
-"literal">0xd834</span>);
-assert (utf16result[<span class="literal">3</span>] == <span class=
-"literal">0xdd1e</span>);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::utf8to16</code>. It does not
- check for validity of the supplied UTF-8 sequence.
- </p>
- <h4>
- utf8::unchecked::utf32to8
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Converts a UTF-32 encoded string to UTF-8.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator, <span class=
-"keyword">typename</span> u32bit_iterator&gt;
-octet_iterator utf32to8 (u32bit_iterator start, u32bit_iterator end, octet_iterator result);
-
-</pre>
- <p>
- <code>start</code>: an iterator pointing to the beginning of the UTF-32 encoded
- string to convert.<br>
-            <code>end</code>: an iterator pointing to past-the-end of the UTF-32 encoded
- string to convert.<br>
- <code>result</code>: an output iterator to the place in the UTF-8 string where to
- append the result of conversion.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the appended UTF-8 string.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">int</span> utf32string[] = {<span class=
-"literal">0x448</span>, <span class="literal">0x65e5</span>, <span class=
-"literal">0x10346</span>, <span class="literal">0</span>};
-vector&lt;<span class="keyword">unsigned char</span>&gt; utf8result;
-unchecked::utf32to8(utf32string, utf32string + <span class=
-"literal">3</span>, back_inserter(utf8result));
-assert (utf8result.size() == <span class="literal">9</span>);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::utf32to8</code>. It does not
- check for validity of the supplied UTF-32 sequence.
- </p>
- <h4>
- utf8::unchecked::utf8to32
- </h4>
- <p class="version">
- Available in version 1.0 and later.
- </p>
- <p>
- Converts a UTF-8 encoded string to UTF-32.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class=
-"keyword">typename</span> octet_iterator, typename u32bit_iterator&gt;
-u32bit_iterator utf8to32 (octet_iterator start, octet_iterator end, u32bit_iterator result);
-
-</pre>
- <p>
- <code>start</code>: an iterator pointing to the beginning of the UTF-8 encoded
- string to convert.<br>
-            <code>end</code>: an iterator pointing to past-the-end of the UTF-8 encoded string
- to convert.<br>
- <code>result</code>: an output iterator to the place in the UTF-32 string where to
- append the result of conversion.<br>
- <span class="return_value">Return value</span>: An iterator pointing to the place
- after the appended UTF-32 string.
- </p>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* twochars = <span class=
-"literal">"\xe6\x97\xa5\xd1\x88"</span>;
-vector&lt;<span class="keyword">int</span>&gt; utf32result;
-unchecked::utf8to32(twochars, twochars + <span class=
-"literal">5</span>, back_inserter(utf32result));
-assert (utf32result.size() == <span class="literal">2</span>);
-</pre>
- <p>
- This is a faster but less safe version of <code>utf8::utf8to32</code>. It does not
- check for validity of the supplied UTF-8 sequence.
- </p>
- <h3 id="typesunchecked">
- Types From utf8::unchecked Namespace
- </h3>
- <h4>
-            utf8::unchecked::iterator
- </h4>
- <p class="version">
- Available in version 2.0 and later.
- </p>
- <p>
- Adapts the underlying octet iterator to iterate over the sequence of code points,
- rather than raw octets.
- </p>
-<pre>
-<span class="keyword">template</span> &lt;<span class="keyword">typename</span> octet_iterator&gt;
-<span class="keyword">class</span> iterator;
-</pre>
-
- <h5>Member functions</h5>
- <dl>
-          <dt><code>iterator();</code> <dd> the default constructor; the underlying <code>octet_iterator</code> is
- constructed with its default constructor.
- <dt><code><span class="keyword">explicit</span> iterator (const octet_iterator&amp; octet_it);
- </code> <dd> a constructor
-            that initializes the underlying <code>octet_iterator</code> with <code>octet_it</code>.
- <dt><code>octet_iterator base () <span class="keyword">const</span>;</code> <dd> returns the
- underlying <code>octet_iterator</code>.
-          <dt><code>uint32_t operator * () <span class="keyword">const</span>;</code> <dd> decodes the UTF-8 sequence
-            the underlying <code>octet_iterator</code> is pointing to and returns the code point.
-          <dt><code><span class="keyword">bool operator</span> == (const iterator&amp; rhs)
-          <span class="keyword">const</span>;</code> <dd> returns <span class="keyword">true</span>
-            if the two underlying iterators are equal.
-          <dt><code><span class="keyword">bool operator</span> != (const iterator&amp; rhs)
-          <span class="keyword">const</span>;</code> <dd> returns <span class="keyword">true</span>
-            if the two underlying iterators are not equal.
- <dt><code>iterator&amp; <span class="keyword">operator</span> ++ (); </code> <dd> the prefix increment - moves
- the iterator to the next UTF-8 encoded code point.
- <dt><code>iterator <span class="keyword">operator</span> ++ (<span class="keyword">int</span>); </code> <dd>
-            the postfix increment - moves the iterator to the next UTF-8 encoded code point and returns a copy of the iterator at its original position.
- <dt><code>iterator&amp; <span class="keyword">operator</span> -- (); </code> <dd> the prefix decrement - moves
- the iterator to the previous UTF-8 encoded code point.
- <dt><code>iterator <span class="keyword">operator</span> -- (<span class="keyword">int</span>); </code> <dd>
-            the postfix decrement - moves the iterator to the previous UTF-8 encoded code point and returns a copy of the iterator at its original position.
- </dl>
- <p>
- Example of use:
- </p>
-<pre>
-<span class="keyword">char</span>* threechars = <span class="literal">"\xf0\x90\x8d\x86\xe6\x97\xa5\xd1\x88"</span>;
-utf8::unchecked::iterator&lt;<span class="keyword">char</span>*&gt; un_it(threechars);
-utf8::unchecked::iterator&lt;<span class="keyword">char</span>*&gt; un_it2 = un_it;
-assert (un_it2 == un_it);
-assert (*un_it == <span class="literal">0x10346</span>);
-assert (*(++un_it) == <span class="literal">0x65e5</span>);
-assert ((*un_it++) == <span class="literal">0x65e5</span>);
-assert (*un_it == <span class="literal">0x0448</span>);
-assert (un_it != un_it2);
-utf8::unchecked::iterator&lt;<span class="keyword">char</span>*&gt; un_endit (threechars + <span class="literal">9</span>);
-assert (++un_it == un_endit);
-assert (*(--un_it) == <span class="literal">0x0448</span>);
-assert ((*un_it--) == <span class="literal">0x0448</span>);
-assert (*un_it == <span class="literal">0x65e5</span>);
-assert (--un_it == utf8::unchecked::iterator&lt;<span class="keyword">char</span>*&gt;(threechars));
-assert (*un_it == <span class="literal">0x10346</span>);
-</pre>
- <p>
- This is an unchecked version of <code>utf8::iterator</code>. It is faster in many cases, but offers
- no validity or range checks.
- </p>
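-          <p>
-            Like the checked adapter, it can be plugged into STL algorithms when the input is
-            known to be valid UTF-8; a minimal sketch (assuming <code>&lt;iterator&gt;</code> is
-            included):
-          </p>
-<pre>
-<span class="keyword">char</span>* threechars = <span class="literal">"\xf0\x90\x8d\x86\xe6\x97\xa5\xd1\x88"</span>;
-utf8::unchecked::iterator&lt;<span class="keyword">char</span>*&gt; first (threechars);
-utf8::unchecked::iterator&lt;<span class="keyword">char</span>*&gt; last (threechars + <span class="literal">9</span>);
-assert (std::distance (first, last) == <span class="literal">3</span>);
-</pre>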
- <h2 id="points">
- Points of interest
- </h2>
- <h4>
- Design goals and decisions
- </h4>
- <p>
- The library was designed to be:
- </p>
- <ol>
- <li>
- Generic: for better or worse, there are many C++ string classes out there, and
- the library should work with as many of them as possible.
- </li>
- <li>
-            Portable: the library should be portable both across different platforms and
- compilers. The only non-portable code is a small section that declares unsigned
- integers of different sizes: three typedefs. They can be changed by the users of
- the library if they don't match their platform. The default setting should work
- for Windows (both 32 and 64 bit), and most 32 bit and 64 bit Unix derivatives.
- </li>
- <li>
- Lightweight: follow the "pay only for what you use" guideline.
- </li>
- <li>
- Unintrusive: avoid forcing any particular design or even programming style on the
- user. This is a library, not a framework.
- </li>
- </ol>
- <h4>
- Alternatives
- </h4>
- <p>
- In case you want to look into other means of working with UTF-8 strings from C++,
- here is the list of solutions I am aware of:
- </p>
- <ol>
- <li>
- <a href="http://icu.sourceforge.net/">ICU Library</a>. It is very powerful,
- complete, feature-rich, mature, and widely used. Also big, intrusive,
-            non-generic, and doesn't play well with the Standard Library. I definitely
- recommend looking at ICU even if you don't plan to use it.
- </li>
- <li>
- C++11 language and library features. Still far from complete, and not widely
- supported by compiler vendors.
- </li>
- <li>
- <a href=
- "http://www.gtkmm.org/gtkmm2/docs/tutorial/html/ch03s04.html">Glib::ustring</a>.
- A class specifically made to work with UTF-8 strings, and also feel like
- <code>std::string</code>. If you prefer to have yet another string class in your
- code, it may be worth a look. Be aware of the licensing issues, though.
- </li>
- <li>
- Platform dependent solutions: Windows and POSIX have functions to convert strings
- from one encoding to another. That is only a subset of what my library offers,
- but if that is all you need it may be good enough.
- </li>
- </ol>
- <h2 id="links">
- Links
- </h2>
- <ol>
- <li>
- <a href="http://www.unicode.org/">The Unicode Consortium</a>.
- </li>
- <li>
- <a href="http://icu.sourceforge.net/">ICU Library</a>.
- </li>
- <li>
- <a href="http://en.wikipedia.org/wiki/UTF-8">UTF-8 at Wikipedia</a>
- </li>
- <li>
- <a href="http://www.cl.cam.ac.uk/~mgk25/unicode.html">UTF-8 and Unicode FAQ for
- Unix/Linux</a>
- </li>
- </ol>
- </body>
-</html>
diff --git a/thirdparty/assimp/assimp/config.h b/thirdparty/assimp/include/assimp/config.h
index d0e4817349..48d61941ad 100644
--- a/thirdparty/assimp/assimp/config.h
+++ b/thirdparty/assimp/include/assimp/config.h
@@ -60,6 +60,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef AI_CONFIG_H_INC
#define AI_CONFIG_H_INC
+
// ###########################################################################
// LIBRARY SETTINGS
// General, global settings
@@ -75,8 +76,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Property type: bool. Default value: false.
*/
-#define AI_CONFIG_GLOB_MEASURE_TIME \
- "GLOB_MEASURE_TIME"
+#define AI_CONFIG_GLOB_MEASURE_TIME \
+ "GLOB_MEASURE_TIME"
+
// ---------------------------------------------------------------------------
/** @brief Global setting to disable generation of skeleton dummy meshes
@@ -87,9 +89,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// ---------------------------------------------------------------------------
#define AI_CONFIG_IMPORT_NO_SKELETON_MESHES \
- "IMPORT_NO_SKELETON_MESHES"
+ "IMPORT_NO_SKELETON_MESHES"
+
-#if 0 // not implemented yet
+
+# if 0 // not implemented yet
// ---------------------------------------------------------------------------
/** @brief Set Assimp's multithreading policy.
*
@@ -105,8 +109,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* For more information, see the @link threading Threading page@endlink.
* Property type: int, default value: -1.
*/
-#define AI_CONFIG_GLOB_MULTITHREADING \
- "GLOB_MULTITHREADING"
+#define AI_CONFIG_GLOB_MULTITHREADING \
+ "GLOB_MULTITHREADING"
#endif
// ###########################################################################
@@ -114,6 +118,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Various stuff to fine-tune the behavior of a specific post processing step.
// ###########################################################################
+
// ---------------------------------------------------------------------------
/** @brief Maximum bone count per mesh for the SplitbyBoneCount step.
*
@@ -124,13 +129,15 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// ---------------------------------------------------------------------------
#define AI_CONFIG_PP_SBBC_MAX_BONES \
- "PP_SBBC_MAX_BONES"
+ "PP_SBBC_MAX_BONES"
+
// default limit for bone count
#if (!defined AI_SBBC_DEFAULT_MAX_BONES)
-#define AI_SBBC_DEFAULT_MAX_BONES 60
+# define AI_SBBC_DEFAULT_MAX_BONES 60
#endif
+
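+// Illustrative sketch only (assuming the C++ Assimp::Importer interface): config
+// strings like the one above are applied as importer properties before reading:
+//
+//   Assimp::Importer importer;
+//   importer.SetPropertyInteger(AI_CONFIG_PP_SBBC_MAX_BONES, 24);
+//   const aiScene *scene = importer.ReadFile("model.dae",
+//           aiProcess_SplitByBoneCount | aiProcess_ValidateDataStructure);
+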
// ---------------------------------------------------------------------------
 /** @brief Specifies the maximum angle between two vertex tangents for
  * their tangents and bi-tangents to be smoothed.
@@ -140,7 +147,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* Property type: float. Default value: 45 degrees
*/
#define AI_CONFIG_PP_CT_MAX_SMOOTHING_ANGLE \
- "PP_CT_MAX_SMOOTHING_ANGLE"
+ "PP_CT_MAX_SMOOTHING_ANGLE"
// ---------------------------------------------------------------------------
/** @brief Source UV channel for tangent space computation.
@@ -150,7 +157,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// ---------------------------------------------------------------------------
#define AI_CONFIG_PP_CT_TEXTURE_CHANNEL_INDEX \
- "PP_CT_TEXTURE_CHANNEL_INDEX"
+ "PP_CT_TEXTURE_CHANNEL_INDEX"
// ---------------------------------------------------------------------------
/** @brief Specifies the maximum angle that may be between two face normals
@@ -165,7 +172,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* the output quality may be reduced.
*/
#define AI_CONFIG_PP_GSN_MAX_SMOOTHING_ANGLE \
- "PP_GSN_MAX_SMOOTHING_ANGLE"
+ "PP_GSN_MAX_SMOOTHING_ANGLE"
+
// ---------------------------------------------------------------------------
/** @brief Sets the colormap (= palette) to be used to decode embedded
@@ -177,8 +185,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* a default palette (from Quake 1) is used.
* Property type: string.
*/
-#define AI_CONFIG_IMPORT_MDL_COLORMAP \
- "IMPORT_MDL_COLORMAP"
+#define AI_CONFIG_IMPORT_MDL_COLORMAP \
+ "IMPORT_MDL_COLORMAP"
// ---------------------------------------------------------------------------
/** @brief Configures the #aiProcess_RemoveRedundantMaterials step to
@@ -199,8 +207,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* @note Linefeeds, tabs or carriage returns are treated as whitespace.
* Material names are case sensitive.
*/
-#define AI_CONFIG_PP_RRM_EXCLUDE_LIST \
- "PP_RRM_EXCLUDE_LIST"
+#define AI_CONFIG_PP_RRM_EXCLUDE_LIST \
+ "PP_RRM_EXCLUDE_LIST"
// ---------------------------------------------------------------------------
/** @brief Configures the #aiProcess_PreTransformVertices step to
@@ -214,8 +222,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* any transformations.
* Property type: bool. Default value: false.
*/
-#define AI_CONFIG_PP_PTV_KEEP_HIERARCHY \
- "PP_PTV_KEEP_HIERARCHY"
+#define AI_CONFIG_PP_PTV_KEEP_HIERARCHY \
+ "PP_PTV_KEEP_HIERARCHY"
// ---------------------------------------------------------------------------
/** @brief Configures the #aiProcess_PreTransformVertices step to normalize
@@ -224,8 +232,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* meshes are scaled appropriately (uniformly of course!).
* This might be useful if you don't know the spatial dimension of the input
* data*/
-#define AI_CONFIG_PP_PTV_NORMALIZE \
- "PP_PTV_NORMALIZE"
+#define AI_CONFIG_PP_PTV_NORMALIZE \
+ "PP_PTV_NORMALIZE"
// ---------------------------------------------------------------------------
/** @brief Configures the #aiProcess_PreTransformVertices step to use
@@ -233,8 +241,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* transforming vertices.
* Property type: bool. Default value: false.
*/
-#define AI_CONFIG_PP_PTV_ADD_ROOT_TRANSFORMATION \
- "PP_PTV_ADD_ROOT_TRANSFORMATION"
+#define AI_CONFIG_PP_PTV_ADD_ROOT_TRANSFORMATION \
+ "PP_PTV_ADD_ROOT_TRANSFORMATION"
// ---------------------------------------------------------------------------
/** @brief Configures the #aiProcess_PreTransformVertices step to use
@@ -243,8 +251,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* of the transformation matrix.
* Property type: aiMatrix4x4.
*/
-#define AI_CONFIG_PP_PTV_ROOT_TRANSFORMATION \
- "PP_PTV_ROOT_TRANSFORMATION"
+#define AI_CONFIG_PP_PTV_ROOT_TRANSFORMATION \
+ "PP_PTV_ROOT_TRANSFORMATION"
// ---------------------------------------------------------------------------
/** @brief Configures the #aiProcess_FindDegenerates step to
@@ -257,7 +265,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* Property type: bool. Default value: false.
*/
#define AI_CONFIG_PP_FD_REMOVE \
- "PP_FD_REMOVE"
+ "PP_FD_REMOVE"
// ---------------------------------------------------------------------------
/**
@@ -266,7 +274,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* be removed if #AI_CONFIG_PP_FD_REMOVE is set to true.
*/
#define AI_CONFIG_PP_FD_CHECKAREA \
- "PP_FD_CHECKAREA"
+ "PP_FD_CHECKAREA"
// ---------------------------------------------------------------------------
/** @brief Configures the #aiProcess_OptimizeGraph step to preserve nodes
@@ -286,8 +294,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* @note Linefeeds, tabs or carriage returns are treated as whitespace.
* Node names are case sensitive.
*/
-#define AI_CONFIG_PP_OG_EXCLUDE_LIST \
- "PP_OG_EXCLUDE_LIST"
+#define AI_CONFIG_PP_OG_EXCLUDE_LIST \
+ "PP_OG_EXCLUDE_LIST"
// ---------------------------------------------------------------------------
/** @brief Set the maximum number of triangles in a mesh.
@@ -298,11 +306,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* Property type: integer.
*/
#define AI_CONFIG_PP_SLM_TRIANGLE_LIMIT \
- "PP_SLM_TRIANGLE_LIMIT"
+ "PP_SLM_TRIANGLE_LIMIT"
// default value for AI_CONFIG_PP_SLM_TRIANGLE_LIMIT
#if (!defined AI_SLM_DEFAULT_MAX_TRIANGLES)
-#define AI_SLM_DEFAULT_MAX_TRIANGLES 1000000
+# define AI_SLM_DEFAULT_MAX_TRIANGLES 1000000
#endif
// ---------------------------------------------------------------------------
@@ -314,11 +322,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* Property type: integer.
*/
#define AI_CONFIG_PP_SLM_VERTEX_LIMIT \
- "PP_SLM_VERTEX_LIMIT"
+ "PP_SLM_VERTEX_LIMIT"
// default value for AI_CONFIG_PP_SLM_VERTEX_LIMIT
#if (!defined AI_SLM_DEFAULT_MAX_VERTICES)
-#define AI_SLM_DEFAULT_MAX_VERTICES 1000000
+# define AI_SLM_DEFAULT_MAX_VERTICES 1000000
#endif
// ---------------------------------------------------------------------------
@@ -327,12 +335,12 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* This is used by the #aiProcess_LimitBoneWeights PostProcess-Step.
* @note The default value is AI_LMW_MAX_WEIGHTS
* Property type: integer.*/
-#define AI_CONFIG_PP_LBW_MAX_WEIGHTS \
- "PP_LBW_MAX_WEIGHTS"
+#define AI_CONFIG_PP_LBW_MAX_WEIGHTS \
+ "PP_LBW_MAX_WEIGHTS"
// default value for AI_CONFIG_PP_LBW_MAX_WEIGHTS
#if (!defined AI_LMW_MAX_WEIGHTS)
-#define AI_LMW_MAX_WEIGHTS 0x4
+# define AI_LMW_MAX_WEIGHTS 0x4
#endif // !! AI_LMW_MAX_WEIGHTS
// ---------------------------------------------------------------------------
@@ -342,11 +350,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* @note The default value is AI_DEBONE_THRESHOLD
* Property type: float.*/
#define AI_CONFIG_PP_DB_THRESHOLD \
- "PP_DB_THRESHOLD"
+ "PP_DB_THRESHOLD"
// default value for AI_CONFIG_PP_LBW_MAX_WEIGHTS
#if (!defined AI_DEBONE_THRESHOLD)
-#define AI_DEBONE_THRESHOLD 1.0f
+# define AI_DEBONE_THRESHOLD 1.0f
#endif // !! AI_DEBONE_THRESHOLD
// ---------------------------------------------------------------------------
@@ -356,12 +364,12 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* @note The default value is 0
* Property type: bool.*/
#define AI_CONFIG_PP_DB_ALL_OR_NONE \
- "PP_DB_ALL_OR_NONE"
+ "PP_DB_ALL_OR_NONE"
/** @brief Default value for the #AI_CONFIG_PP_ICL_PTCACHE_SIZE property
*/
#ifndef PP_ICL_PTCACHE_SIZE
-#define PP_ICL_PTCACHE_SIZE 12
+# define PP_ICL_PTCACHE_SIZE 12
#endif
// ---------------------------------------------------------------------------
@@ -375,7 +383,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* performance improvements for most nVidia/AMD cards since 2002.
* Property type: integer.
*/
-#define AI_CONFIG_PP_ICL_PTCACHE_SIZE "PP_ICL_PTCACHE_SIZE"
+#define AI_CONFIG_PP_ICL_PTCACHE_SIZE "PP_ICL_PTCACHE_SIZE"
// ---------------------------------------------------------------------------
/** @brief Enumerates components of the aiScene and aiMesh data structures
@@ -383,71 +391,73 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* See the documentation to #aiProcess_RemoveComponent for more details.
*/
-enum aiComponent {
-/** Normal vectors */
+enum aiComponent
+{
+ /** Normal vectors */
#ifdef SWIG
aiComponent_NORMALS = 0x2,
#else
- aiComponent_NORMALS = 0x2u,
+ aiComponent_NORMALS = 0x2u,
#endif
-/** Tangents and bitangents go always together ... */
+ /** Tangents and bitangents go always together ... */
#ifdef SWIG
aiComponent_TANGENTS_AND_BITANGENTS = 0x4,
#else
- aiComponent_TANGENTS_AND_BITANGENTS = 0x4u,
+ aiComponent_TANGENTS_AND_BITANGENTS = 0x4u,
#endif
- /** ALL color sets
+ /** ALL color sets
* Use aiComponent_COLORn(N) to specify the N'th set */
- aiComponent_COLORS = 0x8,
+ aiComponent_COLORS = 0x8,
- /** ALL texture UV sets
+ /** ALL texture UV sets
* aiComponent_TEXCOORDn(N) to specify the N'th set */
- aiComponent_TEXCOORDS = 0x10,
+ aiComponent_TEXCOORDS = 0x10,
- /** Removes all bone weights from all meshes.
+ /** Removes all bone weights from all meshes.
* The scenegraph nodes corresponding to the bones are NOT removed.
* use the #aiProcess_OptimizeGraph step to do this */
- aiComponent_BONEWEIGHTS = 0x20,
+ aiComponent_BONEWEIGHTS = 0x20,
- /** Removes all node animations (aiScene::mAnimations).
+ /** Removes all node animations (aiScene::mAnimations).
* The corresponding scenegraph nodes are NOT removed.
* use the #aiProcess_OptimizeGraph step to do this */
- aiComponent_ANIMATIONS = 0x40,
+ aiComponent_ANIMATIONS = 0x40,
- /** Removes all embedded textures (aiScene::mTextures) */
- aiComponent_TEXTURES = 0x80,
+ /** Removes all embedded textures (aiScene::mTextures) */
+ aiComponent_TEXTURES = 0x80,
- /** Removes all light sources (aiScene::mLights).
+ /** Removes all light sources (aiScene::mLights).
* The corresponding scenegraph nodes are NOT removed.
* use the #aiProcess_OptimizeGraph step to do this */
- aiComponent_LIGHTS = 0x100,
+ aiComponent_LIGHTS = 0x100,
- /** Removes all cameras (aiScene::mCameras).
+ /** Removes all cameras (aiScene::mCameras).
* The corresponding scenegraph nodes are NOT removed.
* use the #aiProcess_OptimizeGraph step to do this */
- aiComponent_CAMERAS = 0x200,
+ aiComponent_CAMERAS = 0x200,
- /** Removes all meshes (aiScene::mMeshes). */
- aiComponent_MESHES = 0x400,
+ /** Removes all meshes (aiScene::mMeshes). */
+ aiComponent_MESHES = 0x400,
- /** Removes all materials. One default material will
+ /** Removes all materials. One default material will
* be generated, so aiScene::mNumMaterials will be 1. */
- aiComponent_MATERIALS = 0x800,
+ aiComponent_MATERIALS = 0x800,
+
-/** This value is not used. It is just there to force the
+ /** This value is not used. It is just there to force the
* compiler to map this enum to a 32 Bit integer. */
#ifndef SWIG
- _aiComponent_Force32Bit = 0x9fffffff
+ _aiComponent_Force32Bit = 0x9fffffff
#endif
};
// Remove a specific color channel 'n'
-#define aiComponent_COLORSn(n) (1u << (n + 20u))
+#define aiComponent_COLORSn(n) (1u << (n+20u))
// Remove a specific UV channel 'n'
-#define aiComponent_TEXCOORDSn(n) (1u << (n + 25u))
+#define aiComponent_TEXCOORDSn(n) (1u << (n+25u))
// ---------------------------------------------------------------------------
/** @brief Input parameter to the #aiProcess_RemoveComponent step:
@@ -461,8 +471,8 @@ enum aiComponent {
* of the flags defined above) the import FAILS. Mainly because there is
* no data to work on anymore ...
*/
-#define AI_CONFIG_PP_RVC_FLAGS \
- "PP_RVC_FLAGS"
+#define AI_CONFIG_PP_RVC_FLAGS \
+ "PP_RVC_FLAGS"
// ---------------------------------------------------------------------------
/** @brief Input parameter to the #aiProcess_SortByPType step:
@@ -473,8 +483,8 @@ enum aiComponent {
* be to exclude all line and point meshes from the import. This
* is an integer property, its default value is 0.
*/
-#define AI_CONFIG_PP_SBP_REMOVE \
- "PP_SBP_REMOVE"
+#define AI_CONFIG_PP_SBP_REMOVE \
+ "PP_SBP_REMOVE"
// ---------------------------------------------------------------------------
/** @brief Input parameter to the #aiProcess_FindInvalidData step:
@@ -485,8 +495,16 @@ enum aiComponent {
* abs(n0-n1)>epsilon holds true for all vector respectively quaternion
* components. The default value is 0.f - comparisons are exact then.
*/
-#define AI_CONFIG_PP_FID_ANIM_ACCURACY \
- "PP_FID_ANIM_ACCURACY"
+#define AI_CONFIG_PP_FID_ANIM_ACCURACY \
+ "PP_FID_ANIM_ACCURACY"
+
+// ---------------------------------------------------------------------------
+/** @brief Input parameter to the #aiProcess_FindInvalidData step:
+ * Set to true to ignore texture coordinates. This may be useful if you have
+ * to assign different kinds of textures, like one for the summer and one for the winter.
+ */
+#define AI_CONFIG_PP_FID_IGNORE_TEXTURECOORDS \
+ "PP_FID_IGNORE_TEXTURECOORDS"
// TransformUVCoords evaluates UV scalings
#define AI_UVTRAFO_SCALING 0x1
@@ -501,14 +519,6 @@ enum aiComponent {
#define AI_UVTRAFO_ALL (AI_UVTRAFO_SCALING | AI_UVTRAFO_ROTATION | AI_UVTRAFO_TRANSLATION)
// ---------------------------------------------------------------------------
-/** @brief Input parameter to the #aiProcess_FindInvalidData step:
- * Set to true to ignore texture coordinates. This may be useful if you have
- * to assign different kind of textures like one for the summer or one for the winter.
- */
-#define AI_CONFIG_PP_FID_IGNORE_TEXTURECOORDS \
- "PP_FID_IGNORE_TEXTURECOORDS"
-
-// ---------------------------------------------------------------------------
/** @brief Input parameter to the #aiProcess_TransformUVCoords step:
* Specifies which UV transformations are evaluated.
*
@@ -516,8 +526,8 @@ enum aiComponent {
* property, of course). By default all transformations are enabled
* (AI_UVTRAFO_ALL).
*/
-#define AI_CONFIG_PP_TUV_EVALUATE \
- "PP_TUV_EVALUATE"
+#define AI_CONFIG_PP_TUV_EVALUATE \
+ "PP_TUV_EVALUATE"
// ---------------------------------------------------------------------------
/** @brief A hint to assimp to favour speed against import quality.
@@ -528,14 +538,16 @@ enum aiComponent {
* This property is expected to be an integer, != 0 stands for true.
* The default value is 0.
*/
-#define AI_CONFIG_FAVOUR_SPEED \
- "FAVOUR_SPEED"
+#define AI_CONFIG_FAVOUR_SPEED \
+ "FAVOUR_SPEED"
+
// ###########################################################################
// IMPORTER SETTINGS
// Various stuff to fine-tune the behaviour of specific importer plugins.
// ###########################################################################
+
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will merge all geometry layers present
* in the source file or take only the first.
@@ -544,7 +556,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_READ_ALL_GEOMETRY_LAYERS \
- "IMPORT_FBX_READ_ALL_GEOMETRY_LAYERS"
+ "IMPORT_FBX_READ_ALL_GEOMETRY_LAYERS"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will read all materials present in the
@@ -556,7 +568,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_READ_ALL_MATERIALS \
- "IMPORT_FBX_READ_ALL_MATERIALS"
+ "IMPORT_FBX_READ_ALL_MATERIALS"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will read materials.
@@ -565,7 +577,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_READ_MATERIALS \
- "IMPORT_FBX_READ_MATERIALS"
+ "IMPORT_FBX_READ_MATERIALS"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will read embedded textures.
@@ -574,7 +586,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_READ_TEXTURES \
- "IMPORT_FBX_READ_TEXTURES"
+ "IMPORT_FBX_READ_TEXTURES"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will read cameras.
@@ -583,7 +595,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_READ_CAMERAS \
- "IMPORT_FBX_READ_CAMERAS"
+ "IMPORT_FBX_READ_CAMERAS"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will read light sources.
@@ -592,7 +604,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_READ_LIGHTS \
- "IMPORT_FBX_READ_LIGHTS"
+ "IMPORT_FBX_READ_LIGHTS"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will read animations.
@@ -601,7 +613,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_READ_ANIMATIONS \
- "IMPORT_FBX_READ_ANIMATIONS"
+ "IMPORT_FBX_READ_ANIMATIONS"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will act in strict mode in which only
@@ -613,7 +625,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_STRICT_MODE \
- "IMPORT_FBX_STRICT_MODE"
+ "IMPORT_FBX_STRICT_MODE"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will preserve pivot points for
@@ -624,7 +636,7 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_PRESERVE_PIVOTS \
- "IMPORT_FBX_PRESERVE_PIVOTS"
+ "IMPORT_FBX_PRESERVE_PIVOTS"
// ---------------------------------------------------------------------------
/** @brief Specifies whether the importer will drop empty animation curves or
@@ -635,26 +647,26 @@ enum aiComponent {
* Property type: bool
*/
#define AI_CONFIG_IMPORT_FBX_OPTIMIZE_EMPTY_ANIMATION_CURVES \
- "IMPORT_FBX_OPTIMIZE_EMPTY_ANIMATION_CURVES"
+ "IMPORT_FBX_OPTIMIZE_EMPTY_ANIMATION_CURVES"
// ---------------------------------------------------------------------------
/** @brief Set whether the fbx importer will use the legacy embedded texture naming.
-*
-* The default value is false (0)
-* Property type: bool
-*/
+ *
+ * The default value is false (0)
+ * Property type: bool
+ */
#define AI_CONFIG_IMPORT_FBX_EMBEDDED_TEXTURES_LEGACY_NAMING \
"AI_CONFIG_IMPORT_FBX_EMBEDDED_TEXTURES_LEGACY_NAMING"
// ---------------------------------------------------------------------------
-/** @brief Set wether the FBX importer shall not remove empty bones.
- *
+/** @brief Set whether the importer shall not remove empty bones.
*
 * Empty bones are often used to define connections for other models.
*/
#define AI_CONFIG_IMPORT_REMOVE_EMPTY_BONES \
"AI_CONFIG_IMPORT_REMOVE_EMPTY_BONES"
+
// ---------------------------------------------------------------------------
 /** @brief Set whether the FBX importer shall convert the unit from cm to m.
*/
@@ -674,14 +686,14 @@ enum aiComponent {
* want to override the global setting).
* Property type: integer.
*/
-#define AI_CONFIG_IMPORT_GLOBAL_KEYFRAME "IMPORT_GLOBAL_KEYFRAME"
+#define AI_CONFIG_IMPORT_GLOBAL_KEYFRAME "IMPORT_GLOBAL_KEYFRAME"
-#define AI_CONFIG_IMPORT_MD3_KEYFRAME "IMPORT_MD3_KEYFRAME"
-#define AI_CONFIG_IMPORT_MD2_KEYFRAME "IMPORT_MD2_KEYFRAME"
-#define AI_CONFIG_IMPORT_MDL_KEYFRAME "IMPORT_MDL_KEYFRAME"
-#define AI_CONFIG_IMPORT_MDC_KEYFRAME "IMPORT_MDC_KEYFRAME"
-#define AI_CONFIG_IMPORT_SMD_KEYFRAME "IMPORT_SMD_KEYFRAME"
-#define AI_CONFIG_IMPORT_UNREAL_KEYFRAME "IMPORT_UNREAL_KEYFRAME"
+#define AI_CONFIG_IMPORT_MD3_KEYFRAME "IMPORT_MD3_KEYFRAME"
+#define AI_CONFIG_IMPORT_MD2_KEYFRAME "IMPORT_MD2_KEYFRAME"
+#define AI_CONFIG_IMPORT_MDL_KEYFRAME "IMPORT_MDL_KEYFRAME"
+#define AI_CONFIG_IMPORT_MDC_KEYFRAME "IMPORT_MDC_KEYFRAME"
+#define AI_CONFIG_IMPORT_SMD_KEYFRAME "IMPORT_SMD_KEYFRAME"
+#define AI_CONFIG_IMPORT_UNREAL_KEYFRAME "IMPORT_UNREAL_KEYFRAME"
// ---------------------------------------------------------------------------
/** Smd load multiple animations
@@ -697,7 +709,7 @@ enum aiComponent {
* Property type: bool. Default value: true.
*/
#define AI_CONFIG_IMPORT_AC_SEPARATE_BFCULL \
- "IMPORT_AC_SEPARATE_BFCULL"
+ "IMPORT_AC_SEPARATE_BFCULL"
// ---------------------------------------------------------------------------
/** @brief Configures whether the AC loader evaluates subdivision surfaces (
@@ -707,8 +719,8 @@ enum aiComponent {
*
 * Property type: bool. Default value: true.
*/
-#define AI_CONFIG_IMPORT_AC_EVAL_SUBDIVISION \
- "IMPORT_AC_EVAL_SUBDIVISION"
+#define AI_CONFIG_IMPORT_AC_EVAL_SUBDIVISION \
+ "IMPORT_AC_EVAL_SUBDIVISION"
// ---------------------------------------------------------------------------
/** @brief Configures the UNREAL 3D loader to separate faces with different
@@ -717,7 +729,7 @@ enum aiComponent {
 * Property type: bool. Default value: true.
*/
#define AI_CONFIG_IMPORT_UNREAL_HANDLE_FLAGS \
- "UNREAL_HANDLE_FLAGS"
+ "UNREAL_HANDLE_FLAGS"
// ---------------------------------------------------------------------------
/** @brief Configures the terragen import plugin to compute uv's for
@@ -730,7 +742,7 @@ enum aiComponent {
 * Property type: bool. Default value: false.
*/
#define AI_CONFIG_IMPORT_TER_MAKE_UVS \
- "IMPORT_TER_MAKE_UVS"
+ "IMPORT_TER_MAKE_UVS"
// ---------------------------------------------------------------------------
/** @brief Configures the ASE loader to always reconstruct normal vectors
@@ -739,8 +751,8 @@ enum aiComponent {
 * Some ASE files carry invalid normals, others don't.
 * Property type: bool. Default value: true.
*/
-#define AI_CONFIG_IMPORT_ASE_RECONSTRUCT_NORMALS \
- "IMPORT_ASE_RECONSTRUCT_NORMALS"
+#define AI_CONFIG_IMPORT_ASE_RECONSTRUCT_NORMALS \
+ "IMPORT_ASE_RECONSTRUCT_NORMALS"
// ---------------------------------------------------------------------------
/** @brief Configures the M3D loader to detect and process multi-part
@@ -752,7 +764,7 @@ enum aiComponent {
* Property type: bool. Default value: true.
*/
#define AI_CONFIG_IMPORT_MD3_HANDLE_MULTIPART \
- "IMPORT_MD3_HANDLE_MULTIPART"
+ "IMPORT_MD3_HANDLE_MULTIPART"
// ---------------------------------------------------------------------------
/** @brief Tells the MD3 loader which skin files to load.
@@ -764,7 +776,7 @@ enum aiComponent {
* Property type: String. Default value: "default".
*/
#define AI_CONFIG_IMPORT_MD3_SKIN_NAME \
- "IMPORT_MD3_SKIN_NAME"
+ "IMPORT_MD3_SKIN_NAME"
// ---------------------------------------------------------------------------
/** @brief Specify the Quake 3 shader file to be used for a particular
@@ -782,7 +794,7 @@ enum aiComponent {
* Property type: String. Default value: n/a.
*/
#define AI_CONFIG_IMPORT_MD3_SHADER_SRC \
- "IMPORT_MD3_SHADER_SRC"
+ "IMPORT_MD3_SHADER_SRC"
// ---------------------------------------------------------------------------
/** @brief Configures the LWO loader to load just one layer from the model.
@@ -795,8 +807,8 @@ enum aiComponent {
* layer name may not be empty.<br>
* Property type: Integer. Default value: all layers are loaded.
*/
-#define AI_CONFIG_IMPORT_LWO_ONE_LAYER_ONLY \
- "IMPORT_LWO_ONE_LAYER_ONLY"
+#define AI_CONFIG_IMPORT_LWO_ONE_LAYER_ONLY \
+ "IMPORT_LWO_ONE_LAYER_ONLY"
// ---------------------------------------------------------------------------
/** @brief Configures the MD5 loader to not load the MD5ANIM file for
@@ -809,8 +821,8 @@ enum aiComponent {
*
* * Property type: bool. Default value: false.
*/
-#define AI_CONFIG_IMPORT_MD5_NO_ANIM_AUTOLOAD \
- "IMPORT_MD5_NO_ANIM_AUTOLOAD"
+#define AI_CONFIG_IMPORT_MD5_NO_ANIM_AUTOLOAD \
+ "IMPORT_MD5_NO_ANIM_AUTOLOAD"
// ---------------------------------------------------------------------------
/** @brief Defines the begin of the time range for which the LWS loader
@@ -828,10 +840,10 @@ enum aiComponent {
*
* @see AI_CONFIG_IMPORT_LWS_ANIM_END - end of the imported time range
*/
-#define AI_CONFIG_IMPORT_LWS_ANIM_START \
- "IMPORT_LWS_ANIM_START"
-#define AI_CONFIG_IMPORT_LWS_ANIM_END \
- "IMPORT_LWS_ANIM_END"
+#define AI_CONFIG_IMPORT_LWS_ANIM_START \
+ "IMPORT_LWS_ANIM_START"
+#define AI_CONFIG_IMPORT_LWS_ANIM_END \
+ "IMPORT_LWS_ANIM_END"
// ---------------------------------------------------------------------------
/** @brief Defines the output frame rate of the IRR loader.
@@ -841,8 +853,8 @@ enum aiComponent {
* are returned by the converter.<br>
* Property type: integer. Default value: 100
*/
-#define AI_CONFIG_IMPORT_IRR_ANIM_FPS \
- "IMPORT_IRR_ANIM_FPS"
+#define AI_CONFIG_IMPORT_IRR_ANIM_FPS \
+ "IMPORT_IRR_ANIM_FPS"
// ---------------------------------------------------------------------------
/** @brief Ogre Importer will try to find referenced materials from this file.
@@ -855,7 +867,7 @@ enum aiComponent {
* Property type: String. Default value: Scene.material.
*/
#define AI_CONFIG_IMPORT_OGRE_MATERIAL_FILE \
- "IMPORT_OGRE_MATERIAL_FILE"
+ "IMPORT_OGRE_MATERIAL_FILE"
// ---------------------------------------------------------------------------
 /** @brief Ogre Importer detects the texture usage from its filename.
@@ -875,15 +887,15 @@ enum aiComponent {
* Property type: Bool. Default value: false.
*/
#define AI_CONFIG_IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME \
- "IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME"
+ "IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME"
-/** @brief Specifies whether the Android JNI asset extraction is supported.
+ /** @brief Specifies whether the Android JNI asset extraction is supported.
*
* Turn on this option if you want to manage assets in native
* Android application without having to keep the internal directory and asset
* manager pointer.
*/
-#define AI_CONFIG_ANDROID_JNI_ASSIMP_MANAGER_SUPPORT "AI_CONFIG_ANDROID_JNI_ASSIMP_MANAGER_SUPPORT"
+ #define AI_CONFIG_ANDROID_JNI_ASSIMP_MANAGER_SUPPORT "AI_CONFIG_ANDROID_JNI_ASSIMP_MANAGER_SUPPORT"
// ---------------------------------------------------------------------------
/** @brief Specifies whether the IFC loader skips over IfcSpace elements.
@@ -922,7 +934,7 @@ enum aiComponent {
// default value for AI_CONFIG_IMPORT_IFC_SMOOTHING_ANGLE
#if (!defined AI_IMPORT_IFC_DEFAULT_SMOOTHING_ANGLE)
-#define AI_IMPORT_IFC_DEFAULT_SMOOTHING_ANGLE 10.0f
+# define AI_IMPORT_IFC_DEFAULT_SMOOTHING_ANGLE 10.0f
#endif
// ---------------------------------------------------------------------------
@@ -938,7 +950,7 @@ enum aiComponent {
// default value for AI_CONFIG_IMPORT_IFC_CYLINDRICAL_TESSELLATION
#if (!defined AI_IMPORT_IFC_DEFAULT_CYLINDRICAL_TESSELLATION)
-#define AI_IMPORT_IFC_DEFAULT_CYLINDRICAL_TESSELLATION 32
+# define AI_IMPORT_IFC_DEFAULT_CYLINDRICAL_TESSELLATION 32
#endif
// ---------------------------------------------------------------------------
@@ -969,8 +981,12 @@ enum aiComponent {
#define AI_CONFIG_EXPORT_XFILE_64BIT "EXPORT_XFILE_64BIT"
-/**
- *
+/** @brief Specifies whether the assimp export shall be able to export point clouds
+ *
+ * When this flag is not set, the exported data has to contain valid faces.
+ * Point clouds are only a collection of vertices with no spatial organization
+ * into faces, so the validation process would remove them. Setting this flag
+ * enables the export of pure point clouds.
*/
#define AI_CONFIG_EXPORT_POINT_CLOUDS "EXPORT_POINT_CLOUDS"
@@ -980,7 +996,7 @@ enum aiComponent {
#define AI_CONFIG_GLOBAL_SCALE_FACTOR_KEY "GLOBAL_SCALE_FACTOR"
#if (!defined AI_CONFIG_GLOBAL_SCALE_FACTOR_DEFAULT)
-#define AI_CONFIG_GLOBAL_SCALE_FACTOR_DEFAULT 1.0f
+# define AI_CONFIG_GLOBAL_SCALE_FACTOR_DEFAULT 1.0f
#endif // !! AI_DEBONE_THRESHOLD
#define AI_CONFIG_APP_SCALE_KEY "APP_SCALE_FACTOR"
@@ -997,7 +1013,6 @@ enum aiComponent {
* Property type: Bool. Default value: undefined.
*/
-/* #cmakedefine ASSIMP_DOUBLE_PRECISION 1 */
+/* #undef ASSIMP_DOUBLE_PRECISION */
#endif // !! AI_CONFIG_H_INC
-
diff --git a/thirdparty/assimp/code/revision.h b/thirdparty/assimp/revision.h
index 66eb875303..66eb875303 100644
--- a/thirdparty/assimp/code/revision.h
+++ b/thirdparty/assimp/revision.h
diff --git a/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h b/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h
index e29c055b9a..40164b6187 100644
--- a/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h
+++ b/thirdparty/glslang/SPIRV/GLSL.ext.EXT.h
@@ -34,5 +34,6 @@ static const char* const E_SPV_EXT_shader_stencil_export = "SPV_EXT_shade
static const char* const E_SPV_EXT_shader_viewport_index_layer = "SPV_EXT_shader_viewport_index_layer";
static const char* const E_SPV_EXT_fragment_fully_covered = "SPV_EXT_fragment_fully_covered";
static const char* const E_SPV_EXT_fragment_invocation_density = "SPV_EXT_fragment_invocation_density";
+static const char* const E_SPV_EXT_demote_to_helper_invocation = "SPV_EXT_demote_to_helper_invocation";
#endif // #ifndef GLSLextEXT_H
diff --git a/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h b/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h
index 333442bb3e..e58e836a8d 100644
--- a/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h
+++ b/thirdparty/glslang/SPIRV/GLSL.ext.KHR.h
@@ -41,5 +41,8 @@ static const char* const E_SPV_KHR_storage_buffer_storage_class = "SPV_KHR_stora
static const char* const E_SPV_KHR_post_depth_coverage = "SPV_KHR_post_depth_coverage";
static const char* const E_SPV_KHR_vulkan_memory_model = "SPV_KHR_vulkan_memory_model";
static const char* const E_SPV_EXT_physical_storage_buffer = "SPV_EXT_physical_storage_buffer";
+static const char* const E_SPV_KHR_physical_storage_buffer = "SPV_KHR_physical_storage_buffer";
+static const char* const E_SPV_EXT_fragment_shader_interlock = "SPV_EXT_fragment_shader_interlock";
+static const char* const E_SPV_KHR_shader_clock = "SPV_KHR_shader_clock";
#endif // #ifndef GLSLextKHR_H
diff --git a/thirdparty/glslang/SPIRV/GLSL.ext.NV.h b/thirdparty/glslang/SPIRV/GLSL.ext.NV.h
index ede2c570eb..50146da104 100644
--- a/thirdparty/glslang/SPIRV/GLSL.ext.NV.h
+++ b/thirdparty/glslang/SPIRV/GLSL.ext.NV.h
@@ -75,4 +75,7 @@ const char* const E_SPV_NV_shading_rate = "SPV_NV_shading_rate";
//SPV_NV_cooperative_matrix
const char* const E_SPV_NV_cooperative_matrix = "SPV_NV_cooperative_matrix";
+//SPV_NV_shader_sm_builtins
+const char* const E_SPV_NV_shader_sm_builtins = "SPV_NV_shader_sm_builtins";
+
#endif // #ifndef GLSLextNV_H
diff --git a/thirdparty/glslang/SPIRV/GlslangToSpv.cpp b/thirdparty/glslang/SPIRV/GlslangToSpv.cpp
index 4ef6cd7fc1..0c8a87e3ce 100644..100755
--- a/thirdparty/glslang/SPIRV/GlslangToSpv.cpp
+++ b/thirdparty/glslang/SPIRV/GlslangToSpv.cpp
@@ -46,9 +46,7 @@ namespace spv {
#include "GLSL.std.450.h"
#include "GLSL.ext.KHR.h"
#include "GLSL.ext.EXT.h"
-#ifdef AMD_EXTENSIONS
#include "GLSL.ext.AMD.h"
-#endif
#include "GLSL.ext.NV.h"
}
@@ -89,9 +87,29 @@ private:
};
struct OpDecorations {
+ public:
+ OpDecorations(spv::Decoration precision, spv::Decoration noContraction, spv::Decoration nonUniform) :
+ precision(precision)
+#ifndef GLSLANG_WEB
+ ,
+ noContraction(noContraction),
+ nonUniform(nonUniform)
+#endif
+ { }
+
spv::Decoration precision;
- spv::Decoration noContraction;
- spv::Decoration nonUniform;
+
+#ifdef GLSLANG_WEB
+ void addNoContraction(spv::Builder&, spv::Id) const { }
+ void addNonUniform(spv::Builder&, spv::Id) const { }
+#else
+ void addNoContraction(spv::Builder& builder, spv::Id t) { builder.addDecoration(t, noContraction); }
+ void addNonUniform(spv::Builder& builder, spv::Id t) { builder.addDecoration(t, nonUniform); }
+ protected:
+ spv::Decoration noContraction;
+ spv::Decoration nonUniform;
+#endif
+
};
} // namespace
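
Aside on the OpDecorations rework above: the GLSLANG_WEB profile strips the NoContraction/NonUniform members entirely while keeping call sites identical, because the web variant supplies empty inline methods. A minimal standalone sketch of the same pattern (illustrative names, not glslang's API):

    #include <cstdio>

    struct Decorations {
        int precision = 0;
    #ifdef WEB_BUILD
        // Web profile: the decoration member does not exist; calls compile away.
        void addNonUniform(int /*resultId*/) const {}
    #else
        int nonUniform = 1;
        void addNonUniform(int resultId) const {
            std::printf("decorate %d with %d\n", resultId, nonUniform);
        }
    #endif
    };

    int main() {
        Decorations d;
        d.addNonUniform(42); // identical call site in both build profiles
    }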
@@ -138,7 +156,7 @@ protected:
spv::LoopControlMask TranslateLoopControl(const glslang::TIntermLoop&, std::vector<unsigned int>& operands) const;
spv::StorageClass TranslateStorageClass(const glslang::TType&);
void addIndirectionIndexCapabilities(const glslang::TType& baseType, const glslang::TType& indexType);
- spv::Id createSpvVariable(const glslang::TIntermSymbol*);
+ spv::Id createSpvVariable(const glslang::TIntermSymbol*, spv::Id forcedType);
spv::Id getSampledType(const glslang::TSampler&);
spv::Id getInvertedSwizzleType(const glslang::TIntermTyped&);
spv::Id createInvertedSwizzle(spv::Decoration precision, const glslang::TIntermTyped&, spv::Id parentResult);
@@ -169,7 +187,7 @@ protected:
void makeGlobalInitializers(const glslang::TIntermSequence&);
void visitFunctions(const glslang::TIntermSequence&);
void handleFunctionEntry(const glslang::TIntermAggregate* node);
- void translateArguments(const glslang::TIntermAggregate& node, std::vector<spv::Id>& arguments);
+ void translateArguments(const glslang::TIntermAggregate& node, std::vector<spv::Id>& arguments, spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags);
void translateArguments(glslang::TIntermUnary& node, std::vector<spv::Id>& arguments);
spv::Id createImageTextureFunctionCall(glslang::TIntermOperator* node);
spv::Id handleUserFunctionCall(const glslang::TIntermAggregate*);
@@ -178,36 +196,30 @@ protected:
glslang::TBasicType typeProxy, bool reduceComparison = true);
spv::Id createBinaryMatrixOperation(spv::Op, OpDecorations&, spv::Id typeId, spv::Id left, spv::Id right);
spv::Id createUnaryOperation(glslang::TOperator op, OpDecorations&, spv::Id typeId, spv::Id operand,
- glslang::TBasicType typeProxy);
+ glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags);
spv::Id createUnaryMatrixOperation(spv::Op op, OpDecorations&, spv::Id typeId, spv::Id operand,
glslang::TBasicType typeProxy);
spv::Id createConversion(glslang::TOperator op, OpDecorations&, spv::Id destTypeId, spv::Id operand,
glslang::TBasicType typeProxy);
spv::Id createIntWidthConversion(glslang::TOperator op, spv::Id operand, int vectorSize);
spv::Id makeSmearedConstant(spv::Id constant, int vectorSize);
- spv::Id createAtomicOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector<spv::Id>& operands, glslang::TBasicType typeProxy);
+ spv::Id createAtomicOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector<spv::Id>& operands, glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags);
spv::Id createInvocationsOperation(glslang::TOperator op, spv::Id typeId, std::vector<spv::Id>& operands, glslang::TBasicType typeProxy);
spv::Id CreateInvocationsVectorOperation(spv::Op op, spv::GroupOperation groupOperation, spv::Id typeId, std::vector<spv::Id>& operands);
spv::Id createSubgroupOperation(glslang::TOperator op, spv::Id typeId, std::vector<spv::Id>& operands, glslang::TBasicType typeProxy);
spv::Id createMiscOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId, std::vector<spv::Id>& operands, glslang::TBasicType typeProxy);
spv::Id createNoArgOperation(glslang::TOperator op, spv::Decoration precision, spv::Id typeId);
spv::Id getSymbolId(const glslang::TIntermSymbol* node);
-#ifdef NV_EXTENSIONS
void addMeshNVDecoration(spv::Id id, int member, const glslang::TQualifier & qualifier);
-#endif
spv::Id createSpvConstant(const glslang::TIntermTyped&);
spv::Id createSpvConstantFromConstUnionArray(const glslang::TType& type, const glslang::TConstUnionArray&, int& nextConst, bool specConstant);
bool isTrivialLeaf(const glslang::TIntermTyped* node);
bool isTrivial(const glslang::TIntermTyped* node);
spv::Id createShortCircuit(glslang::TOperator, glslang::TIntermTyped& left, glslang::TIntermTyped& right);
-#ifdef AMD_EXTENSIONS
spv::Id getExtBuiltins(const char* name);
-#endif
- void addPre13Extension(const char* ext)
- {
- if (builder.getSpvVersion() < glslang::EShTargetSpv_1_3)
- builder.addExtension(ext);
- }
+ std::pair<spv::Id, spv::Id> getForcedType(spv::BuiltIn, const glslang::TType&);
+ spv::Id translateForcedType(spv::Id object);
+ spv::Id createCompositeConstruct(spv::Id typeId, std::vector<spv::Id> constituents);
glslang::SpvOptions& options;
spv::Function* shaderEntry;
@@ -224,6 +236,7 @@ protected:
bool linkageOnly; // true when visiting the set of objects in the AST present only for establishing interface, whether or not they were statically used
std::set<spv::Id> iOSet; // all input/output variables from either static use or declaration of interface
const glslang::TIntermediate* glslangIntermediate;
+ bool nanMinMaxClamp; // true if using NMin/NMax/NClamp instead of FMin/FMax/FClamp
spv::Id stdBuiltins;
std::unordered_map<const char*, spv::Id> extBuiltinMap;
@@ -232,11 +245,17 @@ protected:
std::unordered_map<std::string, spv::Function*> functionMap;
std::unordered_map<const glslang::TTypeList*, spv::Id> structMap[glslang::ElpCount][glslang::ElmCount];
// for mapping glslang block indices to spv indices (e.g., due to hidden members):
- std::unordered_map<const glslang::TTypeList*, std::vector<int> > memberRemapper;
+ std::unordered_map<int, std::vector<int>> memberRemapper;
+ // for mapping a glslang symbol's struct type to its symbol Id
+ std::unordered_map<const glslang::TTypeList*, int> glslangTypeToIdMap;
std::stack<bool> breakForLoop; // false means break for switch
std::unordered_map<std::string, const glslang::TIntermSymbol*> counterOriginator;
// Map pointee types for EbtReference to their forward pointers
std::map<const glslang::TType *, spv::Id> forwardPointers;
+ // Type forcing, for when SPIR-V wants a different type than the AST,
+ // requiring local translation to and from SPIR-V type on every access.
+ // Maps <builtin-variable-id -> AST-required-type-id>
+ std::unordered_map<spv::Id, spv::Id> forceType;
};
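
The new forceType map drives the getForcedType()/translateForcedType() pair introduced further down: a few builtins are declared with the type SPIR-V mandates, and every read converts back to the type the AST expects. The lookup shape, as a sketch with illustrative ids:

    #include <cstdio>
    #include <unordered_map>

    using Id = unsigned;

    int main() {
        // 7 stands for a gl_SubGroupEqMask-style variable, 3 for the 64-bit
        // scalar type the AST declared; absence from the map means no translation.
        std::unordered_map<Id, Id> forceType{{7u, 3u}};

        const Id object = 7u;
        const auto it = forceType.find(object);
        if (it != forceType.end())
            std::printf("load id %u, then convert to AST type %u\n", object, it->second);
    }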
//
@@ -246,6 +265,10 @@ protected:
// Translate glslang profile to SPIR-V source language.
spv::SourceLanguage TranslateSourceLanguage(glslang::EShSource source, EProfile profile)
{
+#ifdef GLSLANG_WEB
+ return spv::SourceLanguageESSL;
+#endif
+
switch (source) {
case glslang::EShSourceGlsl:
switch (profile) {
@@ -270,12 +293,12 @@ spv::ExecutionModel TranslateExecutionModel(EShLanguage stage)
{
switch (stage) {
case EShLangVertex: return spv::ExecutionModelVertex;
+ case EShLangFragment: return spv::ExecutionModelFragment;
+ case EShLangCompute: return spv::ExecutionModelGLCompute;
+#ifndef GLSLANG_WEB
case EShLangTessControl: return spv::ExecutionModelTessellationControl;
case EShLangTessEvaluation: return spv::ExecutionModelTessellationEvaluation;
case EShLangGeometry: return spv::ExecutionModelGeometry;
- case EShLangFragment: return spv::ExecutionModelFragment;
- case EShLangCompute: return spv::ExecutionModelGLCompute;
-#ifdef NV_EXTENSIONS
case EShLangRayGenNV: return spv::ExecutionModelRayGenerationNV;
case EShLangIntersectNV: return spv::ExecutionModelIntersectionNV;
case EShLangAnyHitNV: return spv::ExecutionModelAnyHitNV;
@@ -334,7 +357,7 @@ spv::Decoration TranslateBlockDecoration(const glslang::TType& type, bool useSto
case glslang::EvqBuffer: return useStorageBuffer ? spv::DecorationBlock : spv::DecorationBufferBlock;
case glslang::EvqVaryingIn: return spv::DecorationBlock;
case glslang::EvqVaryingOut: return spv::DecorationBlock;
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
case glslang::EvqPayloadNV: return spv::DecorationBlock;
case glslang::EvqPayloadInNV: return spv::DecorationBlock;
case glslang::EvqHitAttrNV: return spv::DecorationBlock;
@@ -354,18 +377,18 @@ spv::Decoration TranslateBlockDecoration(const glslang::TType& type, bool useSto
void TranslateMemoryDecoration(const glslang::TQualifier& qualifier, std::vector<spv::Decoration>& memory, bool useVulkanMemoryModel)
{
if (!useVulkanMemoryModel) {
- if (qualifier.coherent)
+ if (qualifier.isCoherent())
memory.push_back(spv::DecorationCoherent);
- if (qualifier.volatil) {
+ if (qualifier.isVolatile()) {
memory.push_back(spv::DecorationVolatile);
memory.push_back(spv::DecorationCoherent);
}
}
- if (qualifier.restrict)
+ if (qualifier.isRestrict())
memory.push_back(spv::DecorationRestrict);
- if (qualifier.readonly)
+ if (qualifier.isReadOnly())
memory.push_back(spv::DecorationNonWritable);
- if (qualifier.writeonly)
+ if (qualifier.isWriteOnly())
memory.push_back(spv::DecorationNonReadable);
}
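
The accessor swap above (isCoherent() and friends replacing raw qualifier fields) leaves the translation itself a plain accumulation into the decoration list. The same shape, sketched standalone with strings in place of spv::Decoration:

    #include <string>
    #include <vector>

    std::vector<std::string> translateMemory(bool coherent, bool volatil, bool restrict_,
                                             bool readonly, bool writeonly, bool vulkanModel) {
        std::vector<std::string> memory;
        if (!vulkanModel) {
            if (coherent)
                memory.push_back("Coherent");
            if (volatil) { // volatile implies coherent outside the Vulkan memory model
                memory.push_back("Volatile");
                memory.push_back("Coherent");
            }
        }
        if (restrict_)
            memory.push_back("Restrict");
        if (readonly)
            memory.push_back("NonWritable");
        if (writeonly)
            memory.push_back("NonReadable");
        return memory;
    }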
@@ -409,7 +432,7 @@ spv::Decoration TranslateLayoutDecoration(const glslang::TType& type, glslang::T
assert(type.getQualifier().layoutPacking == glslang::ElpNone);
}
return spv::DecorationMax;
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
case glslang::EvqPayloadNV:
case glslang::EvqPayloadInNV:
case glslang::EvqHitAttrNV:
@@ -433,16 +456,14 @@ spv::Decoration TGlslangToSpvTraverser::TranslateInterpolationDecoration(const g
if (qualifier.smooth)
// Smooth decoration doesn't exist in SPIR-V 1.0
return spv::DecorationMax;
- else if (qualifier.nopersp)
+ else if (qualifier.isNonPerspective())
return spv::DecorationNoPerspective;
else if (qualifier.flat)
return spv::DecorationFlat;
-#ifdef AMD_EXTENSIONS
- else if (qualifier.explicitInterp) {
+ else if (qualifier.isExplicitInterpolation()) {
builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter);
return spv::DecorationExplicitInterpAMD;
}
-#endif
else
return spv::DecorationMax;
}
@@ -452,15 +473,18 @@ spv::Decoration TGlslangToSpvTraverser::TranslateInterpolationDecoration(const g
// should be applied.
spv::Decoration TGlslangToSpvTraverser::TranslateAuxiliaryStorageDecoration(const glslang::TQualifier& qualifier)
{
- if (qualifier.patch)
- return spv::DecorationPatch;
- else if (qualifier.centroid)
+ if (qualifier.centroid)
return spv::DecorationCentroid;
+#ifndef GLSLANG_WEB
+ else if (qualifier.patch)
+ return spv::DecorationPatch;
else if (qualifier.sample) {
builder.addCapability(spv::CapabilitySampleRateShading);
return spv::DecorationSample;
- } else
- return spv::DecorationMax;
+ }
+#endif
+
+ return spv::DecorationMax;
}
// If glslang type is invariant, return SPIR-V invariant decoration.
@@ -475,29 +499,36 @@ spv::Decoration TranslateInvariantDecoration(const glslang::TQualifier& qualifie
// If glslang type is noContraction, return SPIR-V NoContraction decoration.
spv::Decoration TranslateNoContractionDecoration(const glslang::TQualifier& qualifier)
{
- if (qualifier.noContraction)
+#ifndef GLSLANG_WEB
+ if (qualifier.isNoContraction())
return spv::DecorationNoContraction;
else
+#endif
return spv::DecorationMax;
}
// If glslang type is nonUniform, return SPIR-V NonUniform decoration.
spv::Decoration TGlslangToSpvTraverser::TranslateNonUniformDecoration(const glslang::TQualifier& qualifier)
{
+#ifndef GLSLANG_WEB
if (qualifier.isNonUniform()) {
- builder.addExtension("SPV_EXT_descriptor_indexing");
+ builder.addIncorporatedExtension("SPV_EXT_descriptor_indexing", spv::Spv_1_5);
builder.addCapability(spv::CapabilityShaderNonUniformEXT);
return spv::DecorationNonUniformEXT;
} else
+#endif
return spv::DecorationMax;
}
-spv::MemoryAccessMask TGlslangToSpvTraverser::TranslateMemoryAccess(const spv::Builder::AccessChain::CoherentFlags &coherentFlags)
+spv::MemoryAccessMask TGlslangToSpvTraverser::TranslateMemoryAccess(
+ const spv::Builder::AccessChain::CoherentFlags &coherentFlags)
{
- if (!glslangIntermediate->usingVulkanMemoryModel() || coherentFlags.isImage) {
- return spv::MemoryAccessMaskNone;
- }
spv::MemoryAccessMask mask = spv::MemoryAccessMaskNone;
+
+#ifndef GLSLANG_WEB
+ if (!glslangIntermediate->usingVulkanMemoryModel() || coherentFlags.isImage)
+ return mask;
+
if (coherentFlags.volatil ||
coherentFlags.coherent ||
coherentFlags.devicecoherent ||
@@ -516,15 +547,20 @@ spv::MemoryAccessMask TGlslangToSpvTraverser::TranslateMemoryAccess(const spv::B
if (mask != spv::MemoryAccessMaskNone) {
builder.addCapability(spv::CapabilityVulkanMemoryModelKHR);
}
+#endif
+
return mask;
}
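
TranslateMemoryAccess above folds every coherence flavor onto the same non-private-pointer bit and keeps Volatile separate. A sketch of the mask composition (the bit values follow the SPIR-V enumeration; treat the rest as illustrative):

    #include <cstdint>

    constexpr uint32_t MaskVolatile          = 0x1;   // MemoryAccessVolatileMask
    constexpr uint32_t MaskNonPrivatePointer = 0x20;  // MemoryAccessNonPrivatePointerKHRMask

    uint32_t memoryAccessMask(bool volatil, bool coherent, bool devicecoherent,
                              bool usingVulkanModel, bool isImage) {
        uint32_t mask = 0;
        if (!usingVulkanModel || isImage)
            return mask;                    // images go through image operands instead
        if (volatil || coherent || devicecoherent)
            mask |= MaskNonPrivatePointer;  // any coherence flavor -> non-private
        if (volatil)
            mask |= MaskVolatile;
        return mask;
    }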
-spv::ImageOperandsMask TGlslangToSpvTraverser::TranslateImageOperands(const spv::Builder::AccessChain::CoherentFlags &coherentFlags)
+spv::ImageOperandsMask TGlslangToSpvTraverser::TranslateImageOperands(
+ const spv::Builder::AccessChain::CoherentFlags &coherentFlags)
{
- if (!glslangIntermediate->usingVulkanMemoryModel()) {
- return spv::ImageOperandsMaskNone;
- }
spv::ImageOperandsMask mask = spv::ImageOperandsMaskNone;
+
+#ifndef GLSLANG_WEB
+ if (!glslangIntermediate->usingVulkanMemoryModel())
+ return mask;
+
if (coherentFlags.volatil ||
coherentFlags.coherent ||
coherentFlags.devicecoherent ||
@@ -543,12 +579,15 @@ spv::ImageOperandsMask TGlslangToSpvTraverser::TranslateImageOperands(const spv:
if (mask != spv::ImageOperandsMaskNone) {
builder.addCapability(spv::CapabilityVulkanMemoryModelKHR);
}
+#endif
+
return mask;
}
spv::Builder::AccessChain::CoherentFlags TGlslangToSpvTraverser::TranslateCoherent(const glslang::TType& type)
{
- spv::Builder::AccessChain::CoherentFlags flags;
+ spv::Builder::AccessChain::CoherentFlags flags = {};
+#ifndef GLSLANG_WEB
flags.coherent = type.getQualifier().coherent;
flags.devicecoherent = type.getQualifier().devicecoherent;
flags.queuefamilycoherent = type.getQualifier().queuefamilycoherent;
@@ -566,12 +605,16 @@ spv::Builder::AccessChain::CoherentFlags TGlslangToSpvTraverser::TranslateCohere
flags.coherent ||
flags.volatil;
flags.isImage = type.getBasicType() == glslang::EbtSampler;
+#endif
return flags;
}
-spv::Scope TGlslangToSpvTraverser::TranslateMemoryScope(const spv::Builder::AccessChain::CoherentFlags &coherentFlags)
+spv::Scope TGlslangToSpvTraverser::TranslateMemoryScope(
+ const spv::Builder::AccessChain::CoherentFlags &coherentFlags)
{
- spv::Scope scope;
+ spv::Scope scope = spv::ScopeMax;
+
+#ifndef GLSLANG_WEB
if (coherentFlags.volatil || coherentFlags.coherent) {
// coherent defaults to Device scope in the old model, QueueFamilyKHR scope in the new model
scope = glslangIntermediate->usingVulkanMemoryModel() ? spv::ScopeQueueFamilyKHR : spv::ScopeDevice;
@@ -583,12 +626,12 @@ spv::Scope TGlslangToSpvTraverser::TranslateMemoryScope(const spv::Builder::Acce
scope = spv::ScopeWorkgroup;
} else if (coherentFlags.subgroupcoherent) {
scope = spv::ScopeSubgroup;
- } else {
- scope = spv::ScopeMax;
}
if (glslangIntermediate->usingVulkanMemoryModel() && scope == spv::ScopeDevice) {
builder.addCapability(spv::CapabilityVulkanMemoryModelDeviceScopeKHR);
}
+#endif
+
return scope;
}
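
The rewritten TranslateMemoryScope keeps the widest-qualifier-wins cascade but now initializes the result, letting the web build skip the body and ScopeMax stand for "no scope operand". The priority order, condensed (intermediate coherence levels elided):

    enum Scope { ScopeDevice, ScopeQueueFamilyKHR, ScopeWorkgroup, ScopeSubgroup, ScopeMax };

    Scope memoryScope(bool volatil, bool coherent, bool workgroupcoherent,
                      bool subgroupcoherent, bool vulkanModel) {
        if (volatil || coherent)            // widest: device or queue-family scope
            return vulkanModel ? ScopeQueueFamilyKHR : ScopeDevice;
        if (workgroupcoherent)
            return ScopeWorkgroup;
        if (subgroupcoherent)
            return ScopeSubgroup;
        return ScopeMax;                    // the new initializer's meaning
    }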
@@ -601,6 +644,7 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
{
switch (builtIn) {
case glslang::EbvPointSize:
+#ifndef GLSLANG_WEB
// Defer adding the capability until the built-in is actually used.
if (! memberDeclaration) {
switch (glslangIntermediate->getStage()) {
@@ -615,8 +659,28 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
break;
}
}
+#endif
return spv::BuiltInPointSize;
+ case glslang::EbvPosition: return spv::BuiltInPosition;
+ case glslang::EbvVertexId: return spv::BuiltInVertexId;
+ case glslang::EbvInstanceId: return spv::BuiltInInstanceId;
+ case glslang::EbvVertexIndex: return spv::BuiltInVertexIndex;
+ case glslang::EbvInstanceIndex: return spv::BuiltInInstanceIndex;
+
+ case glslang::EbvFragCoord: return spv::BuiltInFragCoord;
+ case glslang::EbvPointCoord: return spv::BuiltInPointCoord;
+ case glslang::EbvFace: return spv::BuiltInFrontFacing;
+ case glslang::EbvFragDepth: return spv::BuiltInFragDepth;
+
+ case glslang::EbvNumWorkGroups: return spv::BuiltInNumWorkgroups;
+ case glslang::EbvWorkGroupSize: return spv::BuiltInWorkgroupSize;
+ case glslang::EbvWorkGroupId: return spv::BuiltInWorkgroupId;
+ case glslang::EbvLocalInvocationId: return spv::BuiltInLocalInvocationId;
+ case glslang::EbvLocalInvocationIndex: return spv::BuiltInLocalInvocationIndex;
+ case glslang::EbvGlobalInvocationId: return spv::BuiltInGlobalInvocationId;
+
+#ifndef GLSLANG_WEB
// These *Distance capabilities logically belong here, but if the member is declared and
// then never used, consumers of SPIR-V prefer the capability not be declared.
// They are now generated when used, rather than here when declared.
@@ -639,7 +703,7 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
glslangIntermediate->getStage() == EShLangTessControl ||
glslangIntermediate->getStage() == EShLangTessEvaluation) {
- builder.addExtension(spv::E_SPV_EXT_shader_viewport_index_layer);
+ builder.addIncorporatedExtension(spv::E_SPV_EXT_shader_viewport_index_layer, spv::Spv_1_5);
builder.addCapability(spv::CapabilityShaderViewportIndexLayerEXT);
}
return spv::BuiltInViewportIndex;
@@ -656,39 +720,31 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
return spv::BuiltInSampleMask;
case glslang::EbvLayer:
-#ifdef NV_EXTENSIONS
if (glslangIntermediate->getStage() == EShLangMeshNV) {
return spv::BuiltInLayer;
}
-#endif
builder.addCapability(spv::CapabilityGeometry);
if (glslangIntermediate->getStage() == EShLangVertex ||
glslangIntermediate->getStage() == EShLangTessControl ||
glslangIntermediate->getStage() == EShLangTessEvaluation) {
- builder.addExtension(spv::E_SPV_EXT_shader_viewport_index_layer);
+ builder.addIncorporatedExtension(spv::E_SPV_EXT_shader_viewport_index_layer, spv::Spv_1_5);
builder.addCapability(spv::CapabilityShaderViewportIndexLayerEXT);
}
return spv::BuiltInLayer;
- case glslang::EbvPosition: return spv::BuiltInPosition;
- case glslang::EbvVertexId: return spv::BuiltInVertexId;
- case glslang::EbvInstanceId: return spv::BuiltInInstanceId;
- case glslang::EbvVertexIndex: return spv::BuiltInVertexIndex;
- case glslang::EbvInstanceIndex: return spv::BuiltInInstanceIndex;
-
case glslang::EbvBaseVertex:
- addPre13Extension(spv::E_SPV_KHR_shader_draw_parameters);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_shader_draw_parameters, spv::Spv_1_3);
builder.addCapability(spv::CapabilityDrawParameters);
return spv::BuiltInBaseVertex;
case glslang::EbvBaseInstance:
- addPre13Extension(spv::E_SPV_KHR_shader_draw_parameters);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_shader_draw_parameters, spv::Spv_1_3);
builder.addCapability(spv::CapabilityDrawParameters);
return spv::BuiltInBaseInstance;
case glslang::EbvDrawId:
- addPre13Extension(spv::E_SPV_KHR_shader_draw_parameters);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_shader_draw_parameters, spv::Spv_1_3);
builder.addCapability(spv::CapabilityDrawParameters);
return spv::BuiltInDrawIndex;
@@ -707,17 +763,7 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
case glslang::EbvTessLevelOuter: return spv::BuiltInTessLevelOuter;
case glslang::EbvTessCoord: return spv::BuiltInTessCoord;
case glslang::EbvPatchVertices: return spv::BuiltInPatchVertices;
- case glslang::EbvFragCoord: return spv::BuiltInFragCoord;
- case glslang::EbvPointCoord: return spv::BuiltInPointCoord;
- case glslang::EbvFace: return spv::BuiltInFrontFacing;
- case glslang::EbvFragDepth: return spv::BuiltInFragDepth;
case glslang::EbvHelperInvocation: return spv::BuiltInHelperInvocation;
- case glslang::EbvNumWorkGroups: return spv::BuiltInNumWorkgroups;
- case glslang::EbvWorkGroupSize: return spv::BuiltInWorkgroupSize;
- case glslang::EbvWorkGroupId: return spv::BuiltInWorkgroupId;
- case glslang::EbvLocalInvocationId: return spv::BuiltInLocalInvocationId;
- case glslang::EbvLocalInvocationIndex: return spv::BuiltInLocalInvocationIndex;
- case glslang::EbvGlobalInvocationId: return spv::BuiltInGlobalInvocationId;
case glslang::EbvSubGroupSize:
builder.addExtension(spv::E_SPV_KHR_shader_ballot);
@@ -732,27 +778,27 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
case glslang::EbvSubGroupEqMask:
builder.addExtension(spv::E_SPV_KHR_shader_ballot);
builder.addCapability(spv::CapabilitySubgroupBallotKHR);
- return spv::BuiltInSubgroupEqMaskKHR;
+ return spv::BuiltInSubgroupEqMask;
case glslang::EbvSubGroupGeMask:
builder.addExtension(spv::E_SPV_KHR_shader_ballot);
builder.addCapability(spv::CapabilitySubgroupBallotKHR);
- return spv::BuiltInSubgroupGeMaskKHR;
+ return spv::BuiltInSubgroupGeMask;
case glslang::EbvSubGroupGtMask:
builder.addExtension(spv::E_SPV_KHR_shader_ballot);
builder.addCapability(spv::CapabilitySubgroupBallotKHR);
- return spv::BuiltInSubgroupGtMaskKHR;
+ return spv::BuiltInSubgroupGtMask;
case glslang::EbvSubGroupLeMask:
builder.addExtension(spv::E_SPV_KHR_shader_ballot);
builder.addCapability(spv::CapabilitySubgroupBallotKHR);
- return spv::BuiltInSubgroupLeMaskKHR;
+ return spv::BuiltInSubgroupLeMask;
case glslang::EbvSubGroupLtMask:
builder.addExtension(spv::E_SPV_KHR_shader_ballot);
builder.addCapability(spv::CapabilitySubgroupBallotKHR);
- return spv::BuiltInSubgroupLtMaskKHR;
+ return spv::BuiltInSubgroupLtMask;
case glslang::EbvNumSubgroups:
builder.addCapability(spv::CapabilityGroupNonUniform);
@@ -794,7 +840,7 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
builder.addCapability(spv::CapabilityGroupNonUniform);
builder.addCapability(spv::CapabilityGroupNonUniformBallot);
return spv::BuiltInSubgroupLtMask;
-#ifdef AMD_EXTENSIONS
+
case glslang::EbvBaryCoordNoPersp:
builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter);
return spv::BuiltInBaryCoordNoPerspAMD;
@@ -822,15 +868,14 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
case glslang::EbvBaryCoordPullModel:
builder.addExtension(spv::E_SPV_AMD_shader_explicit_vertex_parameter);
return spv::BuiltInBaryCoordPullModelAMD;
-#endif
case glslang::EbvDeviceIndex:
- addPre13Extension(spv::E_SPV_KHR_device_group);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_device_group, spv::Spv_1_3);
builder.addCapability(spv::CapabilityDeviceGroup);
return spv::BuiltInDeviceIndex;
case glslang::EbvViewIndex:
- addPre13Extension(spv::E_SPV_KHR_multiview);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_multiview, spv::Spv_1_3);
builder.addCapability(spv::CapabilityMultiView);
return spv::BuiltInViewIndex;
@@ -844,7 +889,6 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
builder.addCapability(spv::CapabilityFragmentDensityEXT);
return spv::BuiltInFragInvocationCountEXT;
-#ifdef NV_EXTENSIONS
case glslang::EbvViewportMaskNV:
if (!memberDeclaration) {
builder.addExtension(spv::E_SPV_NV_viewport_array2);
@@ -888,7 +932,7 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
builder.addCapability(spv::CapabilityShadingRateNV);
return spv::BuiltInInvocationsPerPixelNV;
- // raytracing
+ // ray tracing
case glslang::EbvLaunchIdNV:
return spv::BuiltInLaunchIdNV;
case glslang::EbvLaunchSizeNV:
@@ -917,6 +961,8 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
return spv::BuiltInWorldToObjectNV;
case glslang::EbvIncomingRayFlagsNV:
return spv::BuiltInIncomingRayFlagsNV;
+
+ // barycentrics
case glslang::EbvBaryCoordNV:
builder.addExtension(spv::E_SPV_NV_fragment_shader_barycentric);
builder.addCapability(spv::CapabilityFragmentBarycentricNV);
@@ -925,23 +971,44 @@ spv::BuiltIn TGlslangToSpvTraverser::TranslateBuiltInDecoration(glslang::TBuiltI
builder.addExtension(spv::E_SPV_NV_fragment_shader_barycentric);
builder.addCapability(spv::CapabilityFragmentBarycentricNV);
return spv::BuiltInBaryCoordNoPerspNV;
- case glslang::EbvTaskCountNV:
+
+ // mesh shaders
+ case glslang::EbvTaskCountNV:
return spv::BuiltInTaskCountNV;
- case glslang::EbvPrimitiveCountNV:
+ case glslang::EbvPrimitiveCountNV:
return spv::BuiltInPrimitiveCountNV;
- case glslang::EbvPrimitiveIndicesNV:
+ case glslang::EbvPrimitiveIndicesNV:
return spv::BuiltInPrimitiveIndicesNV;
- case glslang::EbvClipDistancePerViewNV:
+ case glslang::EbvClipDistancePerViewNV:
return spv::BuiltInClipDistancePerViewNV;
- case glslang::EbvCullDistancePerViewNV:
+ case glslang::EbvCullDistancePerViewNV:
return spv::BuiltInCullDistancePerViewNV;
- case glslang::EbvLayerPerViewNV:
+ case glslang::EbvLayerPerViewNV:
return spv::BuiltInLayerPerViewNV;
- case glslang::EbvMeshViewCountNV:
+ case glslang::EbvMeshViewCountNV:
return spv::BuiltInMeshViewCountNV;
- case glslang::EbvMeshViewIndicesNV:
+ case glslang::EbvMeshViewIndicesNV:
return spv::BuiltInMeshViewIndicesNV;
-#endif
+
+ // sm builtins
+ case glslang::EbvWarpsPerSM:
+ builder.addExtension(spv::E_SPV_NV_shader_sm_builtins);
+ builder.addCapability(spv::CapabilityShaderSMBuiltinsNV);
+ return spv::BuiltInWarpsPerSMNV;
+ case glslang::EbvSMCount:
+ builder.addExtension(spv::E_SPV_NV_shader_sm_builtins);
+ builder.addCapability(spv::CapabilityShaderSMBuiltinsNV);
+ return spv::BuiltInSMCountNV;
+ case glslang::EbvWarpID:
+ builder.addExtension(spv::E_SPV_NV_shader_sm_builtins);
+ builder.addCapability(spv::CapabilityShaderSMBuiltinsNV);
+ return spv::BuiltInWarpIDNV;
+ case glslang::EbvSMID:
+ builder.addExtension(spv::E_SPV_NV_shader_sm_builtins);
+ builder.addCapability(spv::CapabilityShaderSMBuiltinsNV);
+ return spv::BuiltInSMIDNV;
+#endif
+
default:
return spv::BuiltInMax;
}
@@ -952,8 +1019,12 @@ spv::ImageFormat TGlslangToSpvTraverser::TranslateImageFormat(const glslang::TTy
{
assert(type.getBasicType() == glslang::EbtSampler);
+#ifdef GLSLANG_WEB
+ return spv::ImageFormatUnknown;
+#endif
+
// Check for capabilities
- switch (type.getQualifier().layoutFormat) {
+ switch (type.getQualifier().getFormat()) {
case glslang::ElfRg32f:
case glslang::ElfRg16f:
case glslang::ElfR11fG11fB10f:
@@ -990,7 +1061,7 @@ spv::ImageFormat TGlslangToSpvTraverser::TranslateImageFormat(const glslang::TTy
}
// do the translation
- switch (type.getQualifier().layoutFormat) {
+ switch (type.getQualifier().getFormat()) {
case glslang::ElfNone: return spv::ImageFormatUnknown;
case glslang::ElfRgba32f: return spv::ImageFormatRgba32f;
case glslang::ElfRgba16f: return spv::ImageFormatRgba16f;
@@ -1104,27 +1175,25 @@ spv::StorageClass TGlslangToSpvTraverser::TranslateStorageClass(const glslang::T
return spv::StorageClassOutput;
if (glslangIntermediate->getSource() != glslang::EShSourceHlsl ||
- type.getQualifier().storage == glslang::EvqUniform) {
- if (type.getBasicType() == glslang::EbtAtomicUint)
+ type.getQualifier().storage == glslang::EvqUniform) {
+ if (type.isAtomic())
return spv::StorageClassAtomicCounter;
if (type.containsOpaque())
return spv::StorageClassUniformConstant;
}
-#ifdef NV_EXTENSIONS
if (type.getQualifier().isUniformOrBuffer() &&
- type.getQualifier().layoutShaderRecordNV) {
+ type.getQualifier().isShaderRecordNV()) {
return spv::StorageClassShaderRecordBufferNV;
}
-#endif
if (glslangIntermediate->usingStorageBuffer() && type.getQualifier().storage == glslang::EvqBuffer) {
- addPre13Extension(spv::E_SPV_KHR_storage_buffer_storage_class);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_storage_buffer_storage_class, spv::Spv_1_3);
return spv::StorageClassStorageBuffer;
}
if (type.getQualifier().isUniformOrBuffer()) {
- if (type.getQualifier().layoutPushConstant)
+ if (type.getQualifier().isPushConstant())
return spv::StorageClassPushConstant;
if (type.getBasicType() == glslang::EbtBlock)
return spv::StorageClassUniform;
@@ -1132,11 +1201,11 @@ spv::StorageClass TGlslangToSpvTraverser::TranslateStorageClass(const glslang::T
}
switch (type.getQualifier().storage) {
- case glslang::EvqShared: return spv::StorageClassWorkgroup;
case glslang::EvqGlobal: return spv::StorageClassPrivate;
case glslang::EvqConstReadOnly: return spv::StorageClassFunction;
case glslang::EvqTemporary: return spv::StorageClassFunction;
-#ifdef NV_EXTENSIONS
+ case glslang::EvqShared: return spv::StorageClassWorkgroup;
+#ifndef GLSLANG_WEB
case glslang::EvqPayloadNV: return spv::StorageClassRayPayloadNV;
case glslang::EvqPayloadInNV: return spv::StorageClassIncomingRayPayloadNV;
case glslang::EvqHitAttrNV: return spv::StorageClassHitAttributeNV;
@@ -1155,15 +1224,16 @@ spv::StorageClass TGlslangToSpvTraverser::TranslateStorageClass(const glslang::T
void TGlslangToSpvTraverser::addIndirectionIndexCapabilities(const glslang::TType& baseType,
const glslang::TType& indexType)
{
+#ifndef GLSLANG_WEB
if (indexType.getQualifier().isNonUniform()) {
// deal with an asserted non-uniform index
// SPV_EXT_descriptor_indexing already added in TranslateNonUniformDecoration
if (baseType.getBasicType() == glslang::EbtSampler) {
if (baseType.getQualifier().hasAttachment())
builder.addCapability(spv::CapabilityInputAttachmentArrayNonUniformIndexingEXT);
- else if (baseType.isImage() && baseType.getSampler().dim == glslang::EsdBuffer)
+ else if (baseType.isImage() && baseType.getSampler().isBuffer())
builder.addCapability(spv::CapabilityStorageTexelBufferArrayNonUniformIndexingEXT);
- else if (baseType.isTexture() && baseType.getSampler().dim == glslang::EsdBuffer)
+ else if (baseType.isTexture() && baseType.getSampler().isBuffer())
builder.addCapability(spv::CapabilityUniformTexelBufferArrayNonUniformIndexingEXT);
else if (baseType.isImage())
builder.addCapability(spv::CapabilityStorageImageArrayNonUniformIndexingEXT);
@@ -1179,17 +1249,18 @@ void TGlslangToSpvTraverser::addIndirectionIndexCapabilities(const glslang::TTyp
// assume a dynamically uniform index
if (baseType.getBasicType() == glslang::EbtSampler) {
if (baseType.getQualifier().hasAttachment()) {
- builder.addExtension("SPV_EXT_descriptor_indexing");
+ builder.addIncorporatedExtension("SPV_EXT_descriptor_indexing", spv::Spv_1_5);
builder.addCapability(spv::CapabilityInputAttachmentArrayDynamicIndexingEXT);
- } else if (baseType.isImage() && baseType.getSampler().dim == glslang::EsdBuffer) {
- builder.addExtension("SPV_EXT_descriptor_indexing");
+ } else if (baseType.isImage() && baseType.getSampler().isBuffer()) {
+ builder.addIncorporatedExtension("SPV_EXT_descriptor_indexing", spv::Spv_1_5);
builder.addCapability(spv::CapabilityStorageTexelBufferArrayDynamicIndexingEXT);
- } else if (baseType.isTexture() && baseType.getSampler().dim == glslang::EsdBuffer) {
- builder.addExtension("SPV_EXT_descriptor_indexing");
+ } else if (baseType.isTexture() && baseType.getSampler().isBuffer()) {
+ builder.addIncorporatedExtension("SPV_EXT_descriptor_indexing", spv::Spv_1_5);
builder.addCapability(spv::CapabilityUniformTexelBufferArrayDynamicIndexingEXT);
}
}
}
+#endif
}
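
addIndirectionIndexCapabilities above picks between two suffix families: an asserted non-uniform index requests the *NonUniformIndexingEXT capabilities, a merely dynamic one the *DynamicIndexingEXT ones, keyed off the resource kind. A condensed sketch (capability names as strings; remaining resource kinds elided):

    #include <string>

    std::string indexingCapability(bool nonUniform, bool attachment,
                                   bool bufferImage, bool bufferTexture) {
        const std::string suffix = nonUniform ? "NonUniformIndexingEXT"
                                              : "DynamicIndexingEXT";
        if (attachment)    return "InputAttachmentArray" + suffix;
        if (bufferImage)   return "StorageTexelBufferArray" + suffix;
        if (bufferTexture) return "UniformTexelBufferArray" + suffix;
        return "StorageImageArray" + suffix; // further cases condensed
    }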
// Return whether or not the given type is something that should be tied to a
@@ -1199,10 +1270,8 @@ bool IsDescriptorResource(const glslang::TType& type)
// uniform and buffer blocks are included, unless it is a push_constant
if (type.getBasicType() == glslang::EbtBlock)
return type.getQualifier().isUniformOrBuffer() &&
-#ifdef NV_EXTENSIONS
- ! type.getQualifier().layoutShaderRecordNV &&
-#endif
- ! type.getQualifier().layoutPushConstant;
+ ! type.getQualifier().isShaderRecordNV() &&
+ ! type.getQualifier().isPushConstant();
// non block...
// basically samplerXXX/subpass/sampler/texture are all included
@@ -1222,16 +1291,21 @@ void InheritQualifiers(glslang::TQualifier& child, const glslang::TQualifier& pa
if (parent.invariant)
child.invariant = true;
- if (parent.nopersp)
- child.nopersp = true;
-#ifdef AMD_EXTENSIONS
- if (parent.explicitInterp)
- child.explicitInterp = true;
-#endif
if (parent.flat)
child.flat = true;
if (parent.centroid)
child.centroid = true;
+#ifndef GLSLANG_WEB
+ if (parent.nopersp)
+ child.nopersp = true;
+ if (parent.explicitInterp)
+ child.explicitInterp = true;
+ if (parent.perPrimitiveNV)
+ child.perPrimitiveNV = true;
+ if (parent.perViewNV)
+ child.perViewNV = true;
+ if (parent.perTaskNV)
+ child.perTaskNV = true;
if (parent.patch)
child.patch = true;
if (parent.sample)
@@ -1256,13 +1330,6 @@ void InheritQualifiers(glslang::TQualifier& child, const glslang::TQualifier& pa
child.readonly = true;
if (parent.writeonly)
child.writeonly = true;
-#ifdef NV_EXTENSIONS
- if (parent.perPrimitiveNV)
- child.perPrimitiveNV = true;
- if (parent.perViewNV)
- child.perViewNV = true;
- if (parent.perTaskNV)
- child.perTaskNV = true;
#endif
}
@@ -1291,7 +1358,8 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl
sequenceDepth(0), logger(buildLogger),
builder(spvVersion, (glslang::GetKhronosToolId() << 16) | glslang::GetSpirvGeneratorVersion(), logger),
inEntryPoint(false), entryPointTerminated(false), linkageOnly(false),
- glslangIntermediate(glslangIntermediate)
+ glslangIntermediate(glslangIntermediate),
+ nanMinMaxClamp(glslangIntermediate->getNanMinMaxClamp())
{
spv::ExecutionModel executionModel = TranslateExecutionModel(glslangIntermediate->getStage());
@@ -1332,13 +1400,13 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl
if (glslangIntermediate->usingPhysicalStorageBuffer()) {
addressingModel = spv::AddressingModelPhysicalStorageBuffer64EXT;
- builder.addExtension(spv::E_SPV_EXT_physical_storage_buffer);
+ builder.addIncorporatedExtension(spv::E_SPV_EXT_physical_storage_buffer, spv::Spv_1_5);
builder.addCapability(spv::CapabilityPhysicalStorageBufferAddressesEXT);
};
if (glslangIntermediate->usingVulkanMemoryModel()) {
memoryModel = spv::MemoryModelVulkanKHR;
builder.addCapability(spv::CapabilityVulkanMemoryModelKHR);
- builder.addExtension(spv::E_SPV_KHR_vulkan_memory_model);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_vulkan_memory_model, spv::Spv_1_5);
}
builder.setMemoryModel(addressingModel, memoryModel);
@@ -1367,6 +1435,84 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl
builder.addCapability(spv::CapabilityShader);
break;
+ case EShLangFragment:
+ builder.addCapability(spv::CapabilityShader);
+ if (glslangIntermediate->getPixelCenterInteger())
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModePixelCenterInteger);
+
+ if (glslangIntermediate->getOriginUpperLeft())
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeOriginUpperLeft);
+ else
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeOriginLowerLeft);
+
+ if (glslangIntermediate->getEarlyFragmentTests())
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeEarlyFragmentTests);
+
+ if (glslangIntermediate->getPostDepthCoverage()) {
+ builder.addCapability(spv::CapabilitySampleMaskPostDepthCoverage);
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModePostDepthCoverage);
+ builder.addExtension(spv::E_SPV_KHR_post_depth_coverage);
+ }
+
+ if (glslangIntermediate->getDepth() != glslang::EldUnchanged && glslangIntermediate->isDepthReplacing())
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeDepthReplacing);
+
+#ifndef GLSLANG_WEB
+ switch(glslangIntermediate->getDepth()) {
+ case glslang::EldGreater: mode = spv::ExecutionModeDepthGreater; break;
+ case glslang::EldLess: mode = spv::ExecutionModeDepthLess; break;
+ default: mode = spv::ExecutionModeMax; break;
+ }
+ if (mode != spv::ExecutionModeMax)
+ builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode);
+ switch (glslangIntermediate->getInterlockOrdering()) {
+ case glslang::EioPixelInterlockOrdered: mode = spv::ExecutionModePixelInterlockOrderedEXT;
+ break;
+ case glslang::EioPixelInterlockUnordered: mode = spv::ExecutionModePixelInterlockUnorderedEXT;
+ break;
+ case glslang::EioSampleInterlockOrdered: mode = spv::ExecutionModeSampleInterlockOrderedEXT;
+ break;
+ case glslang::EioSampleInterlockUnordered: mode = spv::ExecutionModeSampleInterlockUnorderedEXT;
+ break;
+ case glslang::EioShadingRateInterlockOrdered: mode = spv::ExecutionModeShadingRateInterlockOrderedEXT;
+ break;
+ case glslang::EioShadingRateInterlockUnordered: mode = spv::ExecutionModeShadingRateInterlockUnorderedEXT;
+ break;
+ default: mode = spv::ExecutionModeMax;
+ break;
+ }
+ if (mode != spv::ExecutionModeMax) {
+ builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode);
+ if (mode == spv::ExecutionModeShadingRateInterlockOrderedEXT ||
+ mode == spv::ExecutionModeShadingRateInterlockUnorderedEXT) {
+ builder.addCapability(spv::CapabilityFragmentShaderShadingRateInterlockEXT);
+ } else if (mode == spv::ExecutionModePixelInterlockOrderedEXT ||
+ mode == spv::ExecutionModePixelInterlockUnorderedEXT) {
+ builder.addCapability(spv::CapabilityFragmentShaderPixelInterlockEXT);
+ } else {
+ builder.addCapability(spv::CapabilityFragmentShaderSampleInterlockEXT);
+ }
+ builder.addExtension(spv::E_SPV_EXT_fragment_shader_interlock);
+ }
+#endif
+ break;
+
+ case EShLangCompute:
+ builder.addCapability(spv::CapabilityShader);
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeLocalSize, glslangIntermediate->getLocalSize(0),
+ glslangIntermediate->getLocalSize(1),
+ glslangIntermediate->getLocalSize(2));
+ if (glslangIntermediate->getLayoutDerivativeModeNone() == glslang::LayoutDerivativeGroupQuads) {
+ builder.addCapability(spv::CapabilityComputeDerivativeGroupQuadsNV);
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeDerivativeGroupQuadsNV);
+ builder.addExtension(spv::E_SPV_NV_compute_shader_derivatives);
+ } else if (glslangIntermediate->getLayoutDerivativeModeNone() == glslang::LayoutDerivativeGroupLinear) {
+ builder.addCapability(spv::CapabilityComputeDerivativeGroupLinearNV);
+ builder.addExecutionMode(shaderEntry, spv::ExecutionModeDerivativeGroupLinearNV);
+ builder.addExtension(spv::E_SPV_NV_compute_shader_derivatives);
+ }
+ break;
+#ifndef GLSLANG_WEB
case EShLangTessEvaluation:
case EShLangTessControl:
builder.addCapability(spv::CapabilityTessellation);
@@ -1436,56 +1582,6 @@ TGlslangToSpvTraverser::TGlslangToSpvTraverser(unsigned int spvVersion, const gl
builder.addExecutionMode(shaderEntry, spv::ExecutionModeOutputVertices, glslangIntermediate->getVertices());
break;
- case EShLangFragment:
- builder.addCapability(spv::CapabilityShader);
- if (glslangIntermediate->getPixelCenterInteger())
- builder.addExecutionMode(shaderEntry, spv::ExecutionModePixelCenterInteger);
-
- if (glslangIntermediate->getOriginUpperLeft())
- builder.addExecutionMode(shaderEntry, spv::ExecutionModeOriginUpperLeft);
- else
- builder.addExecutionMode(shaderEntry, spv::ExecutionModeOriginLowerLeft);
-
- if (glslangIntermediate->getEarlyFragmentTests())
- builder.addExecutionMode(shaderEntry, spv::ExecutionModeEarlyFragmentTests);
-
- if (glslangIntermediate->getPostDepthCoverage()) {
- builder.addCapability(spv::CapabilitySampleMaskPostDepthCoverage);
- builder.addExecutionMode(shaderEntry, spv::ExecutionModePostDepthCoverage);
- builder.addExtension(spv::E_SPV_KHR_post_depth_coverage);
- }
-
- switch(glslangIntermediate->getDepth()) {
- case glslang::EldGreater: mode = spv::ExecutionModeDepthGreater; break;
- case glslang::EldLess: mode = spv::ExecutionModeDepthLess; break;
- default: mode = spv::ExecutionModeMax; break;
- }
- if (mode != spv::ExecutionModeMax)
- builder.addExecutionMode(shaderEntry, (spv::ExecutionMode)mode);
-
- if (glslangIntermediate->getDepth() != glslang::EldUnchanged && glslangIntermediate->isDepthReplacing())
- builder.addExecutionMode(shaderEntry, spv::ExecutionModeDepthReplacing);
- break;
-
- case EShLangCompute:
- builder.addCapability(spv::CapabilityShader);
- builder.addExecutionMode(shaderEntry, spv::ExecutionModeLocalSize, glslangIntermediate->getLocalSize(0),
- glslangIntermediate->getLocalSize(1),
- glslangIntermediate->getLocalSize(2));
-#ifdef NV_EXTENSIONS
- if (glslangIntermediate->getLayoutDerivativeModeNone() == glslang::LayoutDerivativeGroupQuads) {
- builder.addCapability(spv::CapabilityComputeDerivativeGroupQuadsNV);
- builder.addExecutionMode(shaderEntry, spv::ExecutionModeDerivativeGroupQuadsNV);
- builder.addExtension(spv::E_SPV_NV_compute_shader_derivatives);
- } else if (glslangIntermediate->getLayoutDerivativeModeNone() == glslang::LayoutDerivativeGroupLinear) {
- builder.addCapability(spv::CapabilityComputeDerivativeGroupLinearNV);
- builder.addExecutionMode(shaderEntry, spv::ExecutionModeDerivativeGroupLinearNV);
- builder.addExtension(spv::E_SPV_NV_compute_shader_derivatives);
- }
-#endif
- break;
-
-#ifdef NV_EXTENSIONS
case EShLangRayGenNV:
case EShLangIntersectNV:
case EShLangAnyHitNV:
@@ -1536,8 +1632,10 @@ void TGlslangToSpvTraverser::finishSpv()
for (auto it = iOSet.cbegin(); it != iOSet.cend(); ++it)
entryPoint->addIdOperand(*it);
- // Add capabilities, extensions, remove unneeded decorations, etc.,
+ // Add capabilities, extensions, remove unneeded decorations, etc.,
// based on the resulting SPIR-V.
+ // Note: WebGPU code generation must have the opportunity to aggressively
+ // prune unreachable merge blocks and continue targets.
builder.postProcess();
}
@@ -1565,6 +1663,9 @@ void TGlslangToSpvTraverser::dumpSpv(std::vector<unsigned int>& out)
void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol)
{
SpecConstantOpModeGuard spec_constant_op_mode_setter(&builder);
+ if (symbol->getType().isStruct())
+ glslangTypeToIdMap[symbol->getType().getStruct()] = symbol->getId();
+
if (symbol->getType().getQualifier().isSpecConstant())
spec_constant_op_mode_setter.turnOnSpecConstantOpMode();
@@ -1572,8 +1673,8 @@ void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol)
// Formal function parameters were mapped during makeFunctions().
spv::Id id = getSymbolId(symbol);
- // Include all "static use" and "linkage only" interface variables on the OpEntryPoint instruction
if (builder.isPointer(id)) {
+ // Include all "static use" and "linkage only" interface variables on the OpEntryPoint instruction
// Consider adding to the OpEntryPoint interface list.
// Only looking at structures if they have at least one member.
if (!symbol->getType().isStruct() || symbol->getType().getStruct()->size() > 0) {
@@ -1585,6 +1686,14 @@ void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol)
iOSet.insert(id);
}
}
+
+ // If the SPIR-V type is required to be different than the AST type,
+ // translate now from the SPIR-V type to the AST type, for the consuming
+ // operation.
+ // Note this turns it from an l-value to an r-value.
+ // Currently, all symbols needing this are inputs; avoid the map lookup when non-input.
+ if (symbol->getType().getQualifier().storage == glslang::EvqVaryingIn)
+ id = translateForcedType(id);
}
// Only process non-linkage-only nodes for generating actual static uses
@@ -1602,13 +1711,16 @@ void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol)
// See comments in handleUserFunctionCall().
// B) Specialization constants (normal constants don't even come in as a variable),
// These are also pure R-values.
+ // C) R-Values from type translation, see above call to translateForcedType()
glslang::TQualifier qualifier = symbol->getQualifier();
- if (qualifier.isSpecConstant() || rValueParameters.find(symbol->getId()) != rValueParameters.end())
+ if (qualifier.isSpecConstant() || rValueParameters.find(symbol->getId()) != rValueParameters.end() ||
+ !builder.isPointerType(builder.getTypeId(id)))
builder.setAccessChainRValue(id);
else
builder.setAccessChainLValue(id);
}
+#ifdef ENABLE_HLSL
// Process linkage-only nodes for any special additional interface work.
if (linkageOnly) {
if (glslangIntermediate->getHlslFunctionality1()) {
@@ -1640,11 +1752,18 @@ void TGlslangToSpvTraverser::visitSymbol(glslang::TIntermSymbol* symbol)
}
}
}
+#endif
}
bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::TIntermBinary* node)
{
builder.setLine(node->getLoc().line, node->getLoc().getFilename());
+ if (node->getLeft()->getAsSymbolNode() != nullptr && node->getLeft()->getType().isStruct()) {
+ glslangTypeToIdMap[node->getLeft()->getType().getStruct()] = node->getLeft()->getAsSymbolNode()->getId();
+ }
+ if (node->getRight()->getAsSymbolNode() != nullptr && node->getRight()->getType().isStruct()) {
+ glslangTypeToIdMap[node->getRight()->getType().getStruct()] = node->getRight()->getAsSymbolNode()->getId();
+ }
SpecConstantOpModeGuard spec_constant_op_mode_setter(&builder);
if (node->getType().getQualifier().isSpecConstant())
@@ -1734,7 +1853,7 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T
// Load through a block reference is performed with a dot operator that
// is mapped to EOpIndexDirectStruct. When we get to the actual reference,
// do a load and reset the access chain.
- if (node->getLeft()->getBasicType() == glslang::EbtReference &&
+ if (node->getLeft()->isReference() &&
!node->getLeft()->getType().isArray() &&
node->getOp() == glslang::EOpIndexDirectStruct)
{
@@ -1749,9 +1868,12 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T
{
// This may be, e.g., an anonymous block-member selection, which generally need
// index remapping due to hidden members in anonymous blocks.
- std::vector<int>& remapper = memberRemapper[node->getLeft()->getType().getStruct()];
- assert(remapper.size() > 0);
- spvIndex = remapper[glslangIndex];
+ int glslangId = glslangTypeToIdMap[node->getLeft()->getType().getStruct()];
+ if (memberRemapper.find(glslangId) != memberRemapper.end()) {
+ std::vector<int>& remapper = memberRemapper[glslangId];
+ assert(remapper.size() > 0);
+ spvIndex = remapper[glslangIndex];
+ }
}
// normal case for indexing array or structure or block
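
The hunk above stops keying memberRemapper by TTypeList pointer and instead goes through the symbol id recorded in glslangTypeToIdMap, tolerating a missing entry rather than asserting one exists. The two-step lookup, sketched:

    #include <unordered_map>
    #include <vector>

    std::unordered_map<const void*, int> glslangTypeToIdMap;   // struct type -> symbol id
    std::unordered_map<int, std::vector<int>> memberRemapper;  // symbol id -> index table

    // Hidden members shift indices; with no recorded table, indices coincide.
    int remapMember(const void* structType, int glslangIndex) {
        const int id = glslangTypeToIdMap[structType];
        const auto it = memberRemapper.find(id);
        return it == memberRemapper.end() ? glslangIndex : it->second[glslangIndex];
    }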
@@ -1860,6 +1982,71 @@ bool TGlslangToSpvTraverser::visitBinary(glslang::TVisit /* visit */, glslang::T
}
}
+// Figure out what, if any, type changes are needed when accessing a specific built-in.
+// Returns <the type SPIR-V requires for declaration, the type to translate to on use>.
+// Also see comment for 'forceType', regarding tracking SPIR-V-required types.
+std::pair<spv::Id, spv::Id> TGlslangToSpvTraverser::getForcedType(spv::BuiltIn builtIn,
+ const glslang::TType& glslangType)
+{
+ switch(builtIn)
+ {
+ case spv::BuiltInSubgroupEqMask:
+ case spv::BuiltInSubgroupGeMask:
+ case spv::BuiltInSubgroupGtMask:
+ case spv::BuiltInSubgroupLeMask:
+ case spv::BuiltInSubgroupLtMask: {
+ // these require changing a 64-bit scalar -> a vector of 32-bit components
+ if (glslangType.isVector())
+ break;
+ std::pair<spv::Id, spv::Id> ret(builder.makeVectorType(builder.makeUintType(32), 4),
+ builder.makeUintType(64));
+ return ret;
+ }
+ default:
+ break;
+ }
+
+ std::pair<spv::Id, spv::Id> ret(spv::NoType, spv::NoType);
+ return ret;
+}
+
+// For an object previously identified (see getForcedType() and forceType)
+// as needing type translations, do the translation needed for a load, turning
+// an L-value into an R-value.
+spv::Id TGlslangToSpvTraverser::translateForcedType(spv::Id object)
+{
+ const auto forceIt = forceType.find(object);
+ if (forceIt == forceType.end())
+ return object;
+
+ spv::Id desiredTypeId = forceIt->second;
+ spv::Id objectTypeId = builder.getTypeId(object);
+ assert(builder.isPointerType(objectTypeId));
+ objectTypeId = builder.getContainedTypeId(objectTypeId);
+ if (builder.isVectorType(objectTypeId) &&
+ builder.getScalarTypeWidth(builder.getContainedTypeId(objectTypeId)) == 32) {
+ if (builder.getScalarTypeWidth(desiredTypeId) == 64) {
+ // handle 32-bit v.xy* -> 64-bit
+ builder.clearAccessChain();
+ builder.setAccessChainLValue(object);
+ object = builder.accessChainLoad(spv::NoPrecision, spv::DecorationMax, objectTypeId);
+ std::vector<spv::Id> components;
+ components.push_back(builder.createCompositeExtract(object, builder.getContainedTypeId(objectTypeId), 0));
+ components.push_back(builder.createCompositeExtract(object, builder.getContainedTypeId(objectTypeId), 1));
+
+ spv::Id vecType = builder.makeVectorType(builder.getContainedTypeId(objectTypeId), 2);
+ return builder.createUnaryOp(spv::OpBitcast, desiredTypeId,
+ builder.createCompositeConstruct(vecType, components));
+ } else {
+ logger->missingFunctionality("forcing 32-bit vector type to non 64-bit scalar");
+ }
+ } else {
+ logger->missingFunctionality("forcing non 32-bit vector type");
+ }
+
+ return object;
+}
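
Concretely, the only forced type handled so far is the subgroup ballot mask family: declared as the uvec4 SPIR-V requires, read back as the 64-bit scalar the AST used. The data movement of the emitted extract/construct/OpBitcast sequence, sketched in plain C++ (little-endian component order assumed):

    #include <cstdint>
    #include <cstring>

    // Take components .xy of the declared uvec4 and reinterpret them as one
    // 64-bit value -- what translateForcedType() builds in SPIR-V above.
    uint64_t maskToU64(const uint32_t declared[4]) {
        uint64_t value;
        const uint32_t xy[2] = { declared[0], declared[1] };
        std::memcpy(&value, xy, sizeof value);
        return value;
    }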
+
bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TIntermUnary* node)
{
builder.setLine(node->getLoc().line, node->getLoc().getFilename());
@@ -1935,20 +2122,31 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI
invertedType = getInvertedSwizzleType(*node->getOperand());
builder.clearAccessChain();
+ TIntermNode *operandNode;
if (invertedType != spv::NoType)
- node->getOperand()->getAsBinaryNode()->getLeft()->traverse(this);
+ operandNode = node->getOperand()->getAsBinaryNode()->getLeft();
else
- node->getOperand()->traverse(this);
+ operandNode = node->getOperand();
+
+ operandNode->traverse(this);
spv::Id operand = spv::NoResult;
+ spv::Builder::AccessChain::CoherentFlags lvalueCoherentFlags;
+
+#ifndef GLSLANG_WEB
if (node->getOp() == glslang::EOpAtomicCounterIncrement ||
node->getOp() == glslang::EOpAtomicCounterDecrement ||
node->getOp() == glslang::EOpAtomicCounter ||
- node->getOp() == glslang::EOpInterpolateAtCentroid)
+ node->getOp() == glslang::EOpInterpolateAtCentroid) {
operand = builder.accessChainGetLValue(); // Special case l-value operands
- else
+ lvalueCoherentFlags = builder.getAccessChain().coherentFlags;
+ lvalueCoherentFlags |= TranslateCoherent(operandNode->getAsTyped()->getType());
+ } else
+#endif
+ {
operand = accessChainLoad(node->getOperand()->getType());
+ }
OpDecorations decorations = { TranslatePrecisionDecoration(node->getOperationPrecision()),
TranslateNoContractionDecoration(node->getType().getQualifier()),
@@ -1960,12 +2158,12 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI
// if not, then possibly an operation
if (! result)
- result = createUnaryOperation(node->getOp(), decorations, resultType(), operand, node->getOperand()->getBasicType());
+ result = createUnaryOperation(node->getOp(), decorations, resultType(), operand, node->getOperand()->getBasicType(), lvalueCoherentFlags);
if (result) {
if (invertedType) {
result = createInvertedSwizzle(decorations.precision, *node->getOperand(), result);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNonUniform(builder, result);
}
builder.clearAccessChain();
@@ -1985,6 +2183,7 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI
spv::Id one = 0;
if (node->getBasicType() == glslang::EbtFloat)
one = builder.makeFloatConstant(1.0F);
+#ifndef GLSLANG_WEB
else if (node->getBasicType() == glslang::EbtDouble)
one = builder.makeDoubleConstant(1.0);
else if (node->getBasicType() == glslang::EbtFloat16)
@@ -1995,6 +2194,7 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI
one = builder.makeInt16Constant(1);
else if (node->getBasicType() == glslang::EbtInt64 || node->getBasicType() == glslang::EbtUint64)
one = builder.makeInt64Constant(1);
+#endif
else
one = builder.makeIntConstant(1);
glslang::TOperator op;
@@ -2022,12 +2222,14 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI
return false;
+#ifndef GLSLANG_WEB
case glslang::EOpEmitStreamVertex:
builder.createNoResultOp(spv::OpEmitStreamVertex, operand);
return false;
case glslang::EOpEndStreamPrimitive:
builder.createNoResultOp(spv::OpEndStreamPrimitive, operand);
return false;
+#endif
default:
logger->missingFunctionality("unknown glslang unary");
@@ -2035,6 +2237,39 @@ bool TGlslangToSpvTraverser::visitUnary(glslang::TVisit /* visit */, glslang::TI
}
}
+// Construct a composite object, recursively copying members if their types don't match
+spv::Id TGlslangToSpvTraverser::createCompositeConstruct(spv::Id resultTypeId, std::vector<spv::Id> constituents)
+{
+ for (int c = 0; c < (int)constituents.size(); ++c) {
+ spv::Id& constituent = constituents[c];
+ spv::Id lType = builder.getContainedTypeId(resultTypeId, c);
+ spv::Id rType = builder.getTypeId(constituent);
+ if (lType != rType) {
+ if (glslangIntermediate->getSpv().spv >= glslang::EShTargetSpv_1_4) {
+ constituent = builder.createUnaryOp(spv::OpCopyLogical, lType, constituent);
+ } else if (builder.isStructType(rType)) {
+ std::vector<spv::Id> rTypeConstituents;
+ int numrTypeConstituents = builder.getNumTypeConstituents(rType);
+ for (int i = 0; i < numrTypeConstituents; ++i) {
+ rTypeConstituents.push_back(builder.createCompositeExtract(constituent, builder.getContainedTypeId(rType, i), i));
+ }
+ constituents[c] = createCompositeConstruct(lType, rTypeConstituents);
+ } else {
+ assert(builder.isArrayType(rType));
+ std::vector<spv::Id> rTypeConstituents;
+ int numrTypeConstituents = builder.getNumTypeConstituents(rType);
+
+ spv::Id elementRType = builder.getContainedTypeId(rType);
+ for (int i = 0; i < numrTypeConstituents; ++i) {
+ rTypeConstituents.push_back(builder.createCompositeExtract(constituent, elementRType, i));
+ }
+ constituents[c] = createCompositeConstruct(lType, rTypeConstituents);
+ }
+ }
+ }
+ return builder.createCompositeConstruct(resultTypeId, constituents);
+}
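
The recursion above exists because two aggregates can be structurally identical yet carry different SPIR-V type ids when their layout decorations differ; pre-1.4 modules rebuild such values member by member, while SPIR-V 1.4 collapses the whole copy into one OpCopyLogical. A C++ analogue of the member-wise path, with illustrative types:

    #include <array>

    // Same shape, distinct types -- like two SPIR-V structs differing only
    // in decoration.
    struct Std140Block  { int n; std::array<float, 2> v; };
    struct DefaultBlock { int n; std::array<float, 2> v; };

    // The pre-SPIR-V-1.4 path: extract each member and reconstruct.
    DefaultBlock copyLogical(const Std140Block& src) {
        return DefaultBlock{ src.n, { src.v[0], src.v[1] } };
    }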
+
bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TIntermAggregate* node)
{
SpecConstantOpModeGuard spec_constant_op_mode_setter(&builder);
@@ -2052,14 +2287,15 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
builder.setAccessChainRValue(result);
return false;
- } else if (node->getOp() == glslang::EOpImageStore ||
-#ifdef AMD_EXTENSIONS
+ }
+#ifndef GLSLANG_WEB
+ else if (node->getOp() == glslang::EOpImageStore ||
node->getOp() == glslang::EOpImageStoreLod ||
-#endif
node->getOp() == glslang::EOpImageAtomicStore) {
// "imageStore" is a special case, which has no result
return false;
}
+#endif
glslang::TOperator binOp = glslang::EOpNull;
bool reduceComparison = true;
@@ -2067,6 +2303,8 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
bool noReturnValue = false;
bool atomic = false;
+ spv::Builder::AccessChain::CoherentFlags lvalueCoherentFlags;
+
assert(node->getOp());
spv::Decoration precision = TranslatePrecisionDecoration(node->getOperationPrecision());
@@ -2264,7 +2502,7 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
{
builder.setLine(node->getLoc().line, node->getLoc().getFilename());
std::vector<spv::Id> arguments;
- translateArguments(*node, arguments);
+ translateArguments(*node, arguments, lvalueCoherentFlags);
spv::Id constructed;
if (node->getOp() == glslang::EOpConstructTextureSampler)
constructed = builder.createOp(spv::OpSampledImage, resultType(), arguments);
@@ -2274,7 +2512,7 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
std::vector<spv::Id> constituents;
for (int c = 0; c < (int)arguments.size(); ++c)
constituents.push_back(arguments[c]);
- constructed = builder.createCompositeConstruct(resultType(), constituents);
+ constructed = createCompositeConstruct(resultType(), constituents);
} else if (isMatrix)
constructed = builder.createMatrixConstructor(precision, arguments, resultType());
else
@@ -2327,6 +2565,7 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
// which can be emitted by the one in createBinaryOperation()
binOp = glslang::EOpMod;
break;
+
case glslang::EOpEmitVertex:
case glslang::EOpEndPrimitive:
case glslang::EOpBarrier:
@@ -2350,10 +2589,6 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
// These all have 0 operands and will naturally finish up in the code below for 0 operands
break;
- case glslang::EOpAtomicStore:
- noReturnValue = true;
- // fallthrough
- case glslang::EOpAtomicLoad:
case glslang::EOpAtomicAdd:
case glslang::EOpAtomicMin:
case glslang::EOpAtomicMax:
@@ -2365,6 +2600,14 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
atomic = true;
break;
+#ifndef GLSLANG_WEB
+ case glslang::EOpAtomicStore:
+ noReturnValue = true;
+ // fallthrough
+ case glslang::EOpAtomicLoad:
+ atomic = true;
+ break;
+
case glslang::EOpAtomicCounterAdd:
case glslang::EOpAtomicCounterSubtract:
case glslang::EOpAtomicCounterMin:
@@ -2379,7 +2622,17 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
atomic = true;
break;
-#ifdef NV_EXTENSIONS
+ case glslang::EOpAbsDifference:
+ case glslang::EOpAddSaturate:
+ case glslang::EOpSubSaturate:
+ case glslang::EOpAverage:
+ case glslang::EOpAverageRounded:
+ case glslang::EOpMul32x16:
+ builder.addCapability(spv::CapabilityIntegerFunctions2INTEL);
+ builder.addExtension("SPV_INTEL_shader_integer_functions2");
+ binOp = node->getOp();
+ break;
+
case glslang::EOpIgnoreIntersectionNV:
case glslang::EOpTerminateRayNV:
case glslang::EOpTraceNV:
@@ -2387,11 +2640,16 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
case glslang::EOpWritePackedPrimitiveIndices4x8NV:
noReturnValue = true;
break;
-#endif
case glslang::EOpCooperativeMatrixLoad:
case glslang::EOpCooperativeMatrixStore:
noReturnValue = true;
break;
+ case glslang::EOpBeginInvocationInterlock:
+ case glslang::EOpEndInvocationInterlock:
+ builder.addExtension(spv::E_SPV_EXT_fragment_shader_interlock);
+ noReturnValue = true;
+ break;
+#endif
default:
break;
@@ -2439,16 +2697,31 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
// special case l-value operands; there are just a few
bool lvalue = false;
switch (node->getOp()) {
- case glslang::EOpFrexp:
case glslang::EOpModf:
if (arg == 1)
lvalue = true;
break;
+
+ case glslang::EOpAtomicAdd:
+ case glslang::EOpAtomicMin:
+ case glslang::EOpAtomicMax:
+ case glslang::EOpAtomicAnd:
+ case glslang::EOpAtomicOr:
+ case glslang::EOpAtomicXor:
+ case glslang::EOpAtomicExchange:
+ case glslang::EOpAtomicCompSwap:
+ if (arg == 0)
+ lvalue = true;
+ break;
+
+#ifndef GLSLANG_WEB
+ case glslang::EOpFrexp:
+ if (arg == 1)
+ lvalue = true;
+ break;
case glslang::EOpInterpolateAtSample:
case glslang::EOpInterpolateAtOffset:
-#ifdef AMD_EXTENSIONS
case glslang::EOpInterpolateAtVertex:
-#endif
if (arg == 0) {
lvalue = true;
@@ -2459,14 +2732,6 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
invertedType = convertGlslangToSpvType(glslangOperands[0]->getAsBinaryNode()->getLeft()->getType());
}
break;
- case glslang::EOpAtomicAdd:
- case glslang::EOpAtomicMin:
- case glslang::EOpAtomicMax:
- case glslang::EOpAtomicAnd:
- case glslang::EOpAtomicOr:
- case glslang::EOpAtomicXor:
- case glslang::EOpAtomicExchange:
- case glslang::EOpAtomicCompSwap:
case glslang::EOpAtomicLoad:
case glslang::EOpAtomicStore:
case glslang::EOpAtomicCounterAdd:
@@ -2499,6 +2764,7 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
if (arg == 1)
lvalue = true;
break;
+#endif
default:
break;
}
@@ -2508,6 +2774,7 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
else
glslangOperands[arg]->traverse(this);
+#ifndef GLSLANG_WEB
if (node->getOp() == glslang::EOpCooperativeMatrixLoad ||
node->getOp() == glslang::EOpCooperativeMatrixStore) {
@@ -2550,16 +2817,20 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
continue;
}
}
+#endif
- if (lvalue)
+ if (lvalue) {
operands.push_back(builder.accessChainGetLValue());
- else {
+ lvalueCoherentFlags = builder.getAccessChain().coherentFlags;
+ lvalueCoherentFlags |= TranslateCoherent(glslangOperands[arg]->getAsTyped()->getType());
+ } else {
builder.setLine(node->getLoc().line, node->getLoc().getFilename());
operands.push_back(accessChainLoad(glslangOperands[arg]->getAsTyped()->getType()));
}
}
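// Sketch (editorial, not part of the patch): this loop is where l-value
// coherence is accumulated — the access chain's flags are OR'ed with flags
// derived from the operand's type, then threaded into createAtomicOperation()
// so the atomic can pick memory semantics (e.g. volatile under the Vulkan
// memory model):
//   lvalueCoherentFlags = builder.getAccessChain().coherentFlags;
//   lvalueCoherentFlags |= TranslateCoherent(type);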
builder.setLine(node->getLoc().line, node->getLoc().getFilename());
+#ifndef GLSLANG_WEB
if (node->getOp() == glslang::EOpCooperativeMatrixLoad) {
std::vector<spv::IdImmediate> idImmOps;
@@ -2586,9 +2857,11 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
builder.createNoResultOp(spv::OpCooperativeMatrixStoreNV, idImmOps);
result = 0;
- } else if (atomic) {
+ } else
+#endif
+ if (atomic) {
// Handle all atomics
- result = createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType());
+ result = createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType(), lvalueCoherentFlags);
} else {
// Pass through to generic operations.
switch (glslangOperands.size()) {
@@ -2603,7 +2876,7 @@ bool TGlslangToSpvTraverser::visitAggregate(glslang::TVisit visit, glslang::TInt
result = createUnaryOperation(
node->getOp(), decorations,
resultType(), operands.front(),
- glslangOperands[0]->getAsTyped()->getBasicType());
+ glslangOperands[0]->getAsTyped()->getBasicType(), lvalueCoherentFlags);
}
break;
default:
@@ -2965,6 +3238,14 @@ bool TGlslangToSpvTraverser::visitBranch(glslang::TVisit /* visit */, glslang::T
builder.clearAccessChain();
break;
+#ifndef GLSLANG_WEB
+ case glslang::EOpDemote:
+ builder.createNoResultOp(spv::OpDemoteToHelperInvocationEXT);
+ builder.addExtension(spv::E_SPV_EXT_demote_to_helper_invocation);
+ builder.addCapability(spv::CapabilityDemoteToHelperInvocationEXT);
+ break;
+#endif
+
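// Sketch (editorial, not part of the patch): unlike OpKill, the
// OpDemoteToHelperInvocationEXT emitted above is not a block terminator —
// execution continues as a helper invocation — which is why no block-ending
// bookkeeping follows the createNoResultOp() call.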
default:
assert(0);
break;
@@ -2973,7 +3254,7 @@ bool TGlslangToSpvTraverser::visitBranch(glslang::TVisit /* visit */, glslang::T
return false;
}
-spv::Id TGlslangToSpvTraverser::createSpvVariable(const glslang::TIntermSymbol* node)
+spv::Id TGlslangToSpvTraverser::createSpvVariable(const glslang::TIntermSymbol* node, spv::Id forcedType)
{
// First, steer off constants, which are not SPIR-V variables, but
// can still have a mapping to a SPIR-V Id.
@@ -2986,51 +3267,57 @@ spv::Id TGlslangToSpvTraverser::createSpvVariable(const glslang::TIntermSymbol*
// Now, handle actual variables
spv::StorageClass storageClass = TranslateStorageClass(node->getType());
- spv::Id spvType = convertGlslangToSpvType(node->getType());
+ spv::Id spvType = forcedType == spv::NoType ? convertGlslangToSpvType(node->getType())
+ : forcedType;
- const bool contains16BitType = node->getType().containsBasicType(glslang::EbtFloat16) ||
- node->getType().containsBasicType(glslang::EbtInt16) ||
- node->getType().containsBasicType(glslang::EbtUint16);
+ const bool contains16BitType = node->getType().contains16BitFloat() ||
+ node->getType().contains16BitInt();
if (contains16BitType) {
switch (storageClass) {
case spv::StorageClassInput:
case spv::StorageClassOutput:
- addPre13Extension(spv::E_SPV_KHR_16bit_storage);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_16bit_storage, spv::Spv_1_3);
builder.addCapability(spv::CapabilityStorageInputOutput16);
break;
- case spv::StorageClassPushConstant:
- addPre13Extension(spv::E_SPV_KHR_16bit_storage);
- builder.addCapability(spv::CapabilityStoragePushConstant16);
- break;
case spv::StorageClassUniform:
- addPre13Extension(spv::E_SPV_KHR_16bit_storage);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_16bit_storage, spv::Spv_1_3);
if (node->getType().getQualifier().storage == glslang::EvqBuffer)
builder.addCapability(spv::CapabilityStorageUniformBufferBlock16);
else
builder.addCapability(spv::CapabilityStorageUniform16);
break;
+#ifndef GLSLANG_WEB
+ case spv::StorageClassPushConstant:
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_16bit_storage, spv::Spv_1_3);
+ builder.addCapability(spv::CapabilityStoragePushConstant16);
+ break;
case spv::StorageClassStorageBuffer:
case spv::StorageClassPhysicalStorageBufferEXT:
- addPre13Extension(spv::E_SPV_KHR_16bit_storage);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_16bit_storage, spv::Spv_1_3);
builder.addCapability(spv::CapabilityStorageUniformBufferBlock16);
break;
+#endif
default:
+ if (node->getType().contains16BitFloat())
+ builder.addCapability(spv::CapabilityFloat16);
+ if (node->getType().contains16BitInt())
+ builder.addCapability(spv::CapabilityInt16);
break;
}
}
- const bool contains8BitType = node->getType().containsBasicType(glslang::EbtInt8) ||
- node->getType().containsBasicType(glslang::EbtUint8);
- if (contains8BitType) {
+ if (node->getType().contains8BitInt()) {
if (storageClass == spv::StorageClassPushConstant) {
- builder.addExtension(spv::E_SPV_KHR_8bit_storage);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_8bit_storage, spv::Spv_1_5);
builder.addCapability(spv::CapabilityStoragePushConstant8);
} else if (storageClass == spv::StorageClassUniform) {
- builder.addExtension(spv::E_SPV_KHR_8bit_storage);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_8bit_storage, spv::Spv_1_5);
builder.addCapability(spv::CapabilityUniformAndStorageBuffer8BitAccess);
} else if (storageClass == spv::StorageClassStorageBuffer) {
- builder.addExtension(spv::E_SPV_KHR_8bit_storage);
+ builder.addIncorporatedExtension(spv::E_SPV_KHR_8bit_storage, spv::Spv_1_5);
builder.addCapability(spv::CapabilityStorageBuffer8BitAccess);
+ } else {
+ builder.addCapability(spv::CapabilityInt8);
}
}
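// Sketch (editorial, not part of the patch): addIncorporatedExtension(ext, ver)
// only emits OpExtension when targeting a SPIR-V version older than 'ver',
// the version that absorbed the feature into core. Typical pairing, as above:
//   builder.addIncorporatedExtension(spv::E_SPV_KHR_8bit_storage, spv::Spv_1_5);
//   builder.addCapability(spv::CapabilityStorageBuffer8BitAccess);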
@@ -3045,15 +3332,15 @@ spv::Id TGlslangToSpvTraverser::createSpvVariable(const glslang::TIntermSymbol*
spv::Id TGlslangToSpvTraverser::getSampledType(const glslang::TSampler& sampler)
{
switch (sampler.type) {
+ case glslang::EbtInt: return builder.makeIntType(32);
+ case glslang::EbtUint: return builder.makeUintType(32);
case glslang::EbtFloat: return builder.makeFloatType(32);
-#ifdef AMD_EXTENSIONS
+#ifndef GLSLANG_WEB
case glslang::EbtFloat16:
builder.addExtension(spv::E_SPV_AMD_gpu_shader_half_float_fetch);
builder.addCapability(spv::CapabilityFloat16ImageAMD);
return builder.makeFloatType(16);
#endif
- case glslang::EbtInt: return builder.makeIntType(32);
- case glslang::EbtUint: return builder.makeUintType(32);
default:
assert(0);
return builder.makeFloatType(32);
@@ -3111,23 +3398,30 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
spvType = builder.makeVoidType();
assert (! type.isArray());
break;
+ case glslang::EbtBool:
+ // "transparent" bool doesn't exist in SPIR-V. The GLSL convention is
+ // a 32-bit int where non-0 means true.
+ if (explicitLayout != glslang::ElpNone)
+ spvType = builder.makeUintType(32);
+ else
+ spvType = builder.makeBoolType();
+ break;
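// Sketch (editorial, not part of the patch): under an explicit
// (std140/std430-style) layout a bool member needs a defined size, so it is
// declared as a 32-bit uint with 0 == false and non-0 == true; only
// layout-free bools become a real OpTypeBool.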
+ case glslang::EbtInt:
+ spvType = builder.makeIntType(32);
+ break;
+ case glslang::EbtUint:
+ spvType = builder.makeUintType(32);
+ break;
case glslang::EbtFloat:
spvType = builder.makeFloatType(32);
break;
+#ifndef GLSLANG_WEB
case glslang::EbtDouble:
spvType = builder.makeFloatType(64);
break;
case glslang::EbtFloat16:
spvType = builder.makeFloatType(16);
break;
- case glslang::EbtBool:
- // "transparent" bool doesn't exist in SPIR-V. The GLSL convention is
- // a 32-bit int where non-0 means true.
- if (explicitLayout != glslang::ElpNone)
- spvType = builder.makeUintType(32);
- else
- spvType = builder.makeBoolType();
- break;
case glslang::EbtInt8:
spvType = builder.makeIntType(8);
break;
@@ -3140,12 +3434,6 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
case glslang::EbtUint16:
spvType = builder.makeUintType(16);
break;
- case glslang::EbtInt:
- spvType = builder.makeIntType(32);
- break;
- case glslang::EbtUint:
- spvType = builder.makeUintType(32);
- break;
case glslang::EbtInt64:
spvType = builder.makeIntType(64);
break;
@@ -3156,22 +3444,38 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
builder.addCapability(spv::CapabilityAtomicStorage);
spvType = builder.makeUintType(32);
break;
-#ifdef NV_EXTENSIONS
case glslang::EbtAccStructNV:
spvType = builder.makeAccelerationStructureNVType();
break;
+ case glslang::EbtReference:
+ {
+ // Make the forward pointer, then recurse to convert the structure type, then
+ // patch up the forward pointer with a real pointer type.
+ if (forwardPointers.find(type.getReferentType()) == forwardPointers.end()) {
+ spv::Id forwardId = builder.makeForwardPointer(spv::StorageClassPhysicalStorageBufferEXT);
+ forwardPointers[type.getReferentType()] = forwardId;
+ }
+ spvType = forwardPointers[type.getReferentType()];
+ if (!forwardReferenceOnly) {
+ spv::Id referentType = convertGlslangToSpvType(*type.getReferentType());
+ builder.makePointerFromForwardPointer(spv::StorageClassPhysicalStorageBufferEXT,
+ forwardPointers[type.getReferentType()],
+ referentType);
+ }
+ }
+ break;
#endif
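// Sketch (editorial, not part of the patch): the two-phase construction above
// breaks cycles in buffer_reference types that refer to themselves: first emit
// OpTypeForwardPointer, then convert the referent (which may reuse that
// forward id), then resolve the placeholder into a real OpTypePointer via
// makePointerFromForwardPointer().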
case glslang::EbtSampler:
{
const glslang::TSampler& sampler = type.getSampler();
- if (sampler.sampler) {
- // pure sampler
+ if (sampler.isPureSampler()) {
spvType = builder.makeSamplerType();
} else {
// an image is present, make its type
- spvType = builder.makeImageType(getSampledType(sampler), TranslateDimensionality(sampler), sampler.shadow, sampler.arrayed, sampler.ms,
- sampler.image ? 2 : 1, TranslateImageFormat(type));
- if (sampler.combined) {
+ spvType = builder.makeImageType(getSampledType(sampler), TranslateDimensionality(sampler),
+ sampler.isShadow(), sampler.isArrayed(), sampler.isMultiSample(),
+ sampler.isImageClass() ? 2 : 1, TranslateImageFormat(type));
+ if (sampler.isCombined()) {
// already has both image and sampler, make the combined type
spvType = builder.makeSampledImageType(spvType);
}
@@ -3193,27 +3497,10 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
// else, we haven't seen it...
if (type.getBasicType() == glslang::EbtBlock)
- memberRemapper[glslangMembers].resize(glslangMembers->size());
+ memberRemapper[glslangTypeToIdMap[glslangMembers]].resize(glslangMembers->size());
spvType = convertGlslangStructToSpvType(type, glslangMembers, explicitLayout, qualifier);
}
break;
- case glslang::EbtReference:
- {
- // Make the forward pointer, then recurse to convert the structure type, then
- // patch up the forward pointer with a real pointer type.
- if (forwardPointers.find(type.getReferentType()) == forwardPointers.end()) {
- spv::Id forwardId = builder.makeForwardPointer(spv::StorageClassPhysicalStorageBufferEXT);
- forwardPointers[type.getReferentType()] = forwardId;
- }
- spvType = forwardPointers[type.getReferentType()];
- if (!forwardReferenceOnly) {
- spv::Id referentType = convertGlslangToSpvType(*type.getReferentType());
- builder.makePointerFromForwardPointer(spv::StorageClassPhysicalStorageBufferEXT,
- forwardPointers[type.getReferentType()],
- referentType);
- }
- }
- break;
default:
assert(0);
break;
@@ -3232,6 +3519,10 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
builder.addExtension(spv::E_SPV_NV_cooperative_matrix);
if (type.getBasicType() == glslang::EbtFloat16)
builder.addCapability(spv::CapabilityFloat16);
+ if (type.getBasicType() == glslang::EbtUint8 ||
+ type.getBasicType() == glslang::EbtInt8) {
+ builder.addCapability(spv::CapabilityInt8);
+ }
spv::Id scope = makeArraySizeId(*type.getTypeParameters(), 1);
spv::Id rows = makeArraySizeId(*type.getTypeParameters(), 2);
@@ -3278,10 +3569,12 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
if (type.isSizedArray())
spvType = builder.makeArrayType(spvType, makeArraySizeId(*type.getArraySizes(), 0), stride);
else {
+#ifndef GLSLANG_WEB
if (!lastBufferBlockMember) {
- builder.addExtension("SPV_EXT_descriptor_indexing");
+ builder.addIncorporatedExtension("SPV_EXT_descriptor_indexing", spv::Spv_1_5);
builder.addCapability(spv::CapabilityRuntimeDescriptorArrayEXT);
}
+#endif
spvType = builder.makeRuntimeArray(spvType);
}
if (stride > 0)
@@ -3297,7 +3590,7 @@ spv::Id TGlslangToSpvTraverser::convertGlslangToSpvType(const glslang::TType& ty
//
bool TGlslangToSpvTraverser::filterMember(const glslang::TType& member)
{
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
auto& extensions = glslangIntermediate->getRequestedExtensions();
if (member.getFieldName() == "gl_SecondaryViewportMaskNV" &&
@@ -3340,12 +3633,15 @@ spv::Id TGlslangToSpvTraverser::convertGlslangStructToSpvType(const glslang::TTy
if (glslangMember.hiddenMember()) {
++memberDelta;
if (type.getBasicType() == glslang::EbtBlock)
- memberRemapper[glslangMembers][i] = -1;
+ memberRemapper[glslangTypeToIdMap[glslangMembers]][i] = -1;
} else {
if (type.getBasicType() == glslang::EbtBlock) {
- memberRemapper[glslangMembers][i] = i - memberDelta;
- if (filterMember(glslangMember))
+ if (filterMember(glslangMember)) {
+ memberDelta++;
+ memberRemapper[glslangTypeToIdMap[glslangMembers]][i] = -1;
continue;
+ }
+ memberRemapper[glslangTypeToIdMap[glslangMembers]][i] = i - memberDelta;
}
// modify just this child's view of the qualifier
glslang::TQualifier memberQualifier = glslangMember.getQualifier();
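// Sketch (editorial, not part of the patch): memberRemapper is now keyed
// through glslangTypeToIdMap rather than by raw TTypeList pointer, presumably
// so a reused allocation cannot collide with a stale entry; filtered members
// are marked -1 and also bump memberDelta, keeping the remapped indices of
// later members contiguous.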
@@ -3361,7 +3657,7 @@ spv::Id TGlslangToSpvTraverser::convertGlslangStructToSpvType(const glslang::TTy
// Make forward pointers for any pointer members, and create a list of members to
// convert to spirv types after creating the struct.
- if (glslangMember.getBasicType() == glslang::EbtReference) {
+ if (glslangMember.isReference()) {
if (forwardPointers.find(glslangMember.getReferentType()) == forwardPointers.end()) {
deferredForwardPointers.push_back(std::make_pair(&glslangMember, memberQualifier));
}
@@ -3403,7 +3699,7 @@ void TGlslangToSpvTraverser::decorateStructType(const glslang::TType& type,
glslang::TType& glslangMember = *(*glslangMembers)[i].type;
int member = i;
if (type.getBasicType() == glslang::EbtBlock) {
- member = memberRemapper[glslangMembers][i];
+ member = memberRemapper[glslangTypeToIdMap[glslangMembers]][i];
if (filterMember(glslangMember))
continue;
}
@@ -3428,13 +3724,14 @@ void TGlslangToSpvTraverser::decorateStructType(const glslang::TType& type,
glslangIntermediate->getSource() == glslang::EShSourceHlsl) {
builder.addMemberDecoration(spvType, member, TranslateInterpolationDecoration(memberQualifier));
builder.addMemberDecoration(spvType, member, TranslateAuxiliaryStorageDecoration(memberQualifier));
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
addMeshNVDecoration(spvType, member, memberQualifier);
#endif
}
}
builder.addMemberDecoration(spvType, member, TranslateInvariantDecoration(memberQualifier));
+#ifndef GLSLANG_WEB
if (type.getBasicType() == glslang::EbtBlock &&
qualifier.storage == glslang::EvqBuffer) {
// Add memory decorations only to top-level members of shader storage block
@@ -3444,6 +3741,8 @@ void TGlslangToSpvTraverser::decorateStructType(const glslang::TType& type,
builder.addMemberDecoration(spvType, member, memory[i]);
}
+#endif
+
// Location assignment was already completed correctly by the front end,
// just track whether a member needs to be decorated.
// Ignore member locations if the container is an array, as that's
@@ -3480,6 +3779,7 @@ void TGlslangToSpvTraverser::decorateStructType(const glslang::TType& type,
if (builtIn != spv::BuiltInMax)
builder.addMemberDecoration(spvType, member, spv::DecorationBuiltIn, (int)builtIn);
+#ifndef GLSLANG_WEB
// nonuniform
builder.addMemberDecoration(spvType, member, TranslateNonUniformDecoration(glslangMember.getQualifier()));
@@ -3489,7 +3789,6 @@ void TGlslangToSpvTraverser::decorateStructType(const glslang::TType& type,
memberQualifier.semanticName);
}
-#ifdef NV_EXTENSIONS
if (builtIn == spv::BuiltInLayer) {
// SPV_NV_viewport_array2 extension
if (glslangMember.getQualifier().layoutViewportRelative){
@@ -3828,10 +4127,10 @@ void TGlslangToSpvTraverser::declareUseOfStructMember(const glslang::TTypeList&
const glslang::TBuiltInVariable glslangBuiltIn = members[glslangMember].type->getQualifier().builtIn;
switch (glslangBuiltIn)
{
+ case glslang::EbvPointSize:
+#ifndef GLSLANG_WEB
case glslang::EbvClipDistance:
case glslang::EbvCullDistance:
- case glslang::EbvPointSize:
-#ifdef NV_EXTENSIONS
case glslang::EbvViewportMaskNV:
case glslang::EbvSecondaryPositionNV:
case glslang::EbvSecondaryViewportMaskNV:
@@ -3894,15 +4193,17 @@ void TGlslangToSpvTraverser::makeFunctions(const glslang::TIntermSequence& glslF
if (paramPrecision != spv::NoPrecision)
decorations.push_back(paramPrecision);
TranslateMemoryDecoration(type.getQualifier(), decorations, useVulkanMemoryModel);
- if (type.getBasicType() == glslang::EbtReference) {
+ if (type.isReference()) {
// Original and non-writable params pass the pointer directly and
// use restrict/aliased, others are stored to a pointer in Function
// memory and use RestrictPointer/AliasedPointer.
if (originalParam(type.getQualifier().storage, type, false) ||
!writableParam(type.getQualifier().storage)) {
- decorations.push_back(type.getQualifier().restrict ? spv::DecorationRestrict : spv::DecorationAliased);
+ decorations.push_back(type.getQualifier().isRestrict() ? spv::DecorationRestrict :
+ spv::DecorationAliased);
} else {
- decorations.push_back(type.getQualifier().restrict ? spv::DecorationRestrictPointerEXT : spv::DecorationAliasedPointerEXT);
+ decorations.push_back(type.getQualifier().isRestrict() ? spv::DecorationRestrictPointerEXT :
+ spv::DecorationAliasedPointerEXT);
}
}
};
@@ -3930,8 +4231,12 @@ void TGlslangToSpvTraverser::makeFunctions(const glslang::TIntermSequence& glslF
std::vector<std::vector<spv::Decoration>> paramDecorations; // list of decorations per parameter
glslang::TIntermSequence& parameters = glslFunction->getSequence()[0]->getAsAggregate()->getSequence();
+#ifdef ENABLE_HLSL
bool implicitThis = (int)parameters.size() > 0 && parameters[0]->getAsSymbolNode()->getName() ==
glslangIntermediate->implicitThisName;
+#else
+ bool implicitThis = false;
+#endif
paramDecorations.resize(parameters.size());
for (int p = 0; p < (int)parameters.size(); ++p) {
@@ -3963,6 +4268,14 @@ void TGlslangToSpvTraverser::makeFunctions(const glslang::TIntermSequence& glslF
symbolValues[parameters[p]->getAsSymbolNode()->getId()] = function->getParamId(p);
// give a name too
builder.addName(function->getParamId(p), parameters[p]->getAsSymbolNode()->getName().c_str());
+
+ const glslang::TType& paramType = parameters[p]->getAsTyped()->getType();
+ if (paramType.contains8BitInt())
+ builder.addCapability(spv::CapabilityInt8);
+ if (paramType.contains16BitInt())
+ builder.addCapability(spv::CapabilityInt16);
+ if (paramType.contains16BitFloat())
+ builder.addCapability(spv::CapabilityFloat16);
}
}
}
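// Sketch (editorial, not part of the patch): small-scalar capabilities for
// parameters are declared here at function-building time, mirroring what
// createSpvVariable() does for variables, e.g.:
//   if (paramType.contains16BitFloat())
//       builder.addCapability(spv::CapabilityFloat16);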
@@ -4001,19 +4314,19 @@ void TGlslangToSpvTraverser::handleFunctionEntry(const glslang::TIntermAggregate
builder.setBuildPoint(functionBlock);
}
-void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate& node, std::vector<spv::Id>& arguments)
+void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate& node, std::vector<spv::Id>& arguments, spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags)
{
const glslang::TIntermSequence& glslangArguments = node.getSequence();
glslang::TSampler sampler = {};
bool cubeCompare = false;
-#ifdef AMD_EXTENSIONS
+#ifndef GLSLANG_WEB
bool f16ShadowCompare = false;
#endif
if (node.isTexture() || node.isImage()) {
sampler = glslangArguments[0]->getAsTyped()->getType().getSampler();
cubeCompare = sampler.dim == glslang::EsdCube && sampler.arrayed && sampler.shadow;
-#ifdef AMD_EXTENSIONS
+#ifndef GLSLANG_WEB
f16ShadowCompare = sampler.shadow && glslangArguments[1]->getAsTyped()->getType().getBasicType() == glslang::EbtFloat16;
#endif
}
@@ -4022,6 +4335,7 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate&
builder.clearAccessChain();
glslangArguments[i]->traverse(this);
+#ifndef GLSLANG_WEB
// Special case l-value operands
bool lvalue = false;
switch (node.getOp()) {
@@ -4042,7 +4356,6 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate&
if ((sampler.ms && i == 3) || (! sampler.ms && i == 2))
lvalue = true;
break;
-#ifdef AMD_EXTENSIONS
case glslang::EOpSparseTexture:
if (((cubeCompare || f16ShadowCompare) && i == 3) || (! (cubeCompare || f16ShadowCompare) && i == 2))
lvalue = true;
@@ -4056,21 +4369,6 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate&
if ((f16ShadowCompare && i == 4) || (! f16ShadowCompare && i == 3))
lvalue = true;
break;
-#else
- case glslang::EOpSparseTexture:
- if ((cubeCompare && i == 3) || (! cubeCompare && i == 2))
- lvalue = true;
- break;
- case glslang::EOpSparseTextureClamp:
- if ((cubeCompare && i == 4) || (! cubeCompare && i == 3))
- lvalue = true;
- break;
- case glslang::EOpSparseTextureLod:
- case glslang::EOpSparseTextureOffset:
- if (i == 3)
- lvalue = true;
- break;
-#endif
case glslang::EOpSparseTextureFetch:
if ((sampler.dim != glslang::EsdRect && i == 3) || (sampler.dim == glslang::EsdRect && i == 2))
lvalue = true;
@@ -4079,7 +4377,6 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate&
if ((sampler.dim != glslang::EsdRect && i == 4) || (sampler.dim == glslang::EsdRect && i == 3))
lvalue = true;
break;
-#ifdef AMD_EXTENSIONS
case glslang::EOpSparseTextureLodOffset:
case glslang::EOpSparseTextureGrad:
case glslang::EOpSparseTextureOffsetClamp:
@@ -4095,23 +4392,6 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate&
if ((f16ShadowCompare && i == 7) || (! f16ShadowCompare && i == 6))
lvalue = true;
break;
-#else
- case glslang::EOpSparseTextureLodOffset:
- case glslang::EOpSparseTextureGrad:
- case glslang::EOpSparseTextureOffsetClamp:
- if (i == 4)
- lvalue = true;
- break;
- case glslang::EOpSparseTextureGradOffset:
- case glslang::EOpSparseTextureGradClamp:
- if (i == 5)
- lvalue = true;
- break;
- case glslang::EOpSparseTextureGradOffsetClamp:
- if (i == 6)
- lvalue = true;
- break;
-#endif
case glslang::EOpSparseTextureGather:
if ((sampler.shadow && i == 3) || (! sampler.shadow && i == 2))
lvalue = true;
@@ -4121,7 +4401,6 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate&
if ((sampler.shadow && i == 4) || (! sampler.shadow && i == 3))
lvalue = true;
break;
-#ifdef AMD_EXTENSIONS
case glslang::EOpSparseTextureGatherLod:
if (i == 3)
lvalue = true;
@@ -4135,8 +4414,6 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate&
if (i == 3)
lvalue = true;
break;
-#endif
-#ifdef NV_EXTENSIONS
case glslang::EOpImageSampleFootprintNV:
if (i == 4)
lvalue = true;
@@ -4154,14 +4431,16 @@ void TGlslangToSpvTraverser::translateArguments(const glslang::TIntermAggregate&
if (i == 7)
lvalue = true;
break;
-#endif
default:
break;
}
- if (lvalue)
+ if (lvalue) {
arguments.push_back(builder.accessChainGetLValue());
- else
+ lvalueCoherentFlags = builder.getAccessChain().coherentFlags;
+ lvalueCoherentFlags |= TranslateCoherent(glslangArguments[i]->getAsTyped()->getType());
+ } else
+#endif
arguments.push_back(accessChainLoad(glslangArguments[i]->getAsTyped()->getType()));
}
}
@@ -4186,7 +4465,9 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
? node->getAsAggregate()->getSequence()[0]->getAsTyped()->getType()
: node->getAsUnaryNode()->getOperand()->getAsTyped()->getType();
const glslang::TSampler sampler = imageType.getSampler();
-#ifdef AMD_EXTENSIONS
+#ifdef GLSLANG_WEB
+ const bool f16ShadowCompare = false;
+#else
bool f16ShadowCompare = (sampler.shadow && node->getAsAggregate())
? node->getAsAggregate()->getSequence()[1]->getAsTyped()->getType().getBasicType() == glslang::EbtFloat16
: false;
@@ -4202,9 +4483,11 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
return spv::ImageOperandsMaskNone;
};
+ spv::Builder::AccessChain::CoherentFlags lvalueCoherentFlags;
+
std::vector<spv::Id> arguments;
if (node->getAsAggregate())
- translateArguments(*node->getAsAggregate(), arguments);
+ translateArguments(*node->getAsAggregate(), arguments, lvalueCoherentFlags);
else
translateArguments(*node->getAsUnaryNode(), arguments);
spv::Decoration precision = TranslatePrecisionDecoration(node->getOperationPrecision());
@@ -4231,6 +4514,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
return builder.createTextureQueryCall(spv::OpImageQuerySizeLod, params, isUnsignedResult);
} else
return builder.createTextureQueryCall(spv::OpImageQuerySize, params, isUnsignedResult);
+#ifndef GLSLANG_WEB
case glslang::EOpImageQuerySamples:
case glslang::EOpTextureQuerySamples:
return builder.createTextureQueryCall(spv::OpImageQuerySamples, params, isUnsignedResult);
@@ -4241,6 +4525,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
return builder.createTextureQueryCall(spv::OpImageQueryLevels, params, isUnsignedResult);
case glslang::EOpSparseTexelsResident:
return builder.createUnaryOp(spv::OpImageSparseTexelsResident, builder.makeBoolType(), arguments[0]);
+#endif
default:
assert(0);
break;
@@ -4282,12 +4567,12 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
operands.push_back(coord);
spv::IdImmediate imageOperands = { false, spv::ImageOperandsMaskNone };
imageOperands.word = imageOperands.word | signExtensionMask();
- if (sampler.ms) {
+ if (sampler.isMultiSample()) {
imageOperands.word = imageOperands.word | spv::ImageOperandsSampleMask;
}
if (imageOperands.word != spv::ImageOperandsMaskNone) {
operands.push_back(imageOperands);
- if (sampler.ms) {
+ if (sampler.isMultiSample()) {
spv::IdImmediate imageOperand = { true, *(opIt++) };
operands.push_back(imageOperand);
}
@@ -4299,22 +4584,16 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
spv::IdImmediate coord = { true, *(opIt++) };
operands.push_back(coord);
-#ifdef AMD_EXTENSIONS
if (node->getOp() == glslang::EOpImageLoad || node->getOp() == glslang::EOpImageLoadLod) {
-#else
- if (node->getOp() == glslang::EOpImageLoad) {
-#endif
spv::ImageOperandsMask mask = spv::ImageOperandsMaskNone;
- if (sampler.ms) {
+ if (sampler.isMultiSample()) {
mask = mask | spv::ImageOperandsSampleMask;
}
-#ifdef AMD_EXTENSIONS
if (cracked.lod) {
builder.addExtension(spv::E_SPV_AMD_shader_image_load_store_lod);
builder.addCapability(spv::CapabilityImageReadWriteLodAMD);
mask = mask | spv::ImageOperandsLodMask;
}
-#endif
mask = mask | TranslateImageOperands(TranslateCoherent(imageType));
mask = (spv::ImageOperandsMask)(mask & ~spv::ImageOperandsMakeTexelAvailableKHRMask);
mask = mask | signExtensionMask();
@@ -4326,12 +4605,10 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
spv::IdImmediate imageOperand = { true, *opIt++ };
operands.push_back(imageOperand);
}
-#ifdef AMD_EXTENSIONS
if (mask & spv::ImageOperandsLodMask) {
spv::IdImmediate imageOperand = { true, *opIt++ };
operands.push_back(imageOperand);
}
-#endif
if (mask & spv::ImageOperandsMakeTexelVisibleKHRMask) {
spv::IdImmediate imageOperand = { true,
builder.makeUintConstant(TranslateMemoryScope(TranslateCoherent(imageType))) };
@@ -4349,18 +4626,10 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
result[0] = builder.createConstructor(precision, result, convertGlslangToSpvType(node->getType()));
return result[0];
-#ifdef AMD_EXTENSIONS
} else if (node->getOp() == glslang::EOpImageStore || node->getOp() == glslang::EOpImageStoreLod) {
-#else
- } else if (node->getOp() == glslang::EOpImageStore) {
-#endif
// Push the texel value before the operands
-#ifdef AMD_EXTENSIONS
- if (sampler.ms || cracked.lod) {
-#else
- if (sampler.ms) {
-#endif
+ if (sampler.isMultiSample() || cracked.lod) {
spv::IdImmediate texel = { true, *(opIt + 1) };
operands.push_back(texel);
} else {
@@ -4369,16 +4638,14 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
}
spv::ImageOperandsMask mask = spv::ImageOperandsMaskNone;
- if (sampler.ms) {
+ if (sampler.isMultiSample()) {
mask = mask | spv::ImageOperandsSampleMask;
}
-#ifdef AMD_EXTENSIONS
if (cracked.lod) {
builder.addExtension(spv::E_SPV_AMD_shader_image_load_store_lod);
builder.addCapability(spv::CapabilityImageReadWriteLodAMD);
mask = mask | spv::ImageOperandsLodMask;
}
-#endif
mask = mask | TranslateImageOperands(TranslateCoherent(imageType));
mask = (spv::ImageOperandsMask)(mask & ~spv::ImageOperandsMakeTexelVisibleKHRMask);
mask = mask | signExtensionMask();
@@ -4390,12 +4657,10 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
spv::IdImmediate imageOperand = { true, *opIt++ };
operands.push_back(imageOperand);
}
-#ifdef AMD_EXTENSIONS
if (mask & spv::ImageOperandsLodMask) {
spv::IdImmediate imageOperand = { true, *opIt++ };
operands.push_back(imageOperand);
}
-#endif
if (mask & spv::ImageOperandsMakeTexelAvailableKHRMask) {
spv::IdImmediate imageOperand = { true,
builder.makeUintConstant(TranslateMemoryScope(TranslateCoherent(imageType))) };
@@ -4406,28 +4671,22 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
if (builder.getImageTypeFormat(builder.getImageType(operands.front().word)) == spv::ImageFormatUnknown)
builder.addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
return spv::NoResult;
-#ifdef AMD_EXTENSIONS
} else if (node->getOp() == glslang::EOpSparseImageLoad ||
node->getOp() == glslang::EOpSparseImageLoadLod) {
-#else
- } else if (node->getOp() == glslang::EOpSparseImageLoad) {
-#endif
builder.addCapability(spv::CapabilitySparseResidency);
if (builder.getImageTypeFormat(builder.getImageType(operands.front().word)) == spv::ImageFormatUnknown)
builder.addCapability(spv::CapabilityStorageImageReadWithoutFormat);
spv::ImageOperandsMask mask = spv::ImageOperandsMaskNone;
- if (sampler.ms) {
+ if (sampler.isMultiSample()) {
mask = mask | spv::ImageOperandsSampleMask;
}
-#ifdef AMD_EXTENSIONS
if (cracked.lod) {
builder.addExtension(spv::E_SPV_AMD_shader_image_load_store_lod);
builder.addCapability(spv::CapabilityImageReadWriteLodAMD);
mask = mask | spv::ImageOperandsLodMask;
}
-#endif
mask = mask | TranslateImageOperands(TranslateCoherent(imageType));
mask = (spv::ImageOperandsMask)(mask & ~spv::ImageOperandsMakeTexelAvailableKHRMask);
mask = mask | signExtensionMask();
@@ -4439,12 +4698,10 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
spv::IdImmediate imageOperand = { true, *opIt++ };
operands.push_back(imageOperand);
}
-#ifdef AMD_EXTENSIONS
if (mask & spv::ImageOperandsLodMask) {
spv::IdImmediate imageOperand = { true, *opIt++ };
operands.push_back(imageOperand);
}
-#endif
if (mask & spv::ImageOperandsMakeTexelVisibleKHRMask) {
spv::IdImmediate imageOperand = { true, builder.makeUintConstant(TranslateMemoryScope(TranslateCoherent(imageType))) };
operands.push_back(imageOperand);
@@ -4467,7 +4724,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
// GLSL "IMAGE_PARAMS" is used to construct an image texel pointer; this pointer,
// as the first source operand, is required by SPIR-V atomic operations.
// For non-MS images, the sample value should be 0.
- spv::IdImmediate sample = { true, sampler.ms ? *(opIt++) : builder.makeUintConstant(0) };
+ spv::IdImmediate sample = { true, sampler.isMultiSample() ? *(opIt++) : builder.makeUintConstant(0) };
operands.push_back(sample);
spv::Id resultTypeId;
@@ -4485,11 +4742,11 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
for (; opIt != arguments.end(); ++opIt)
operands.push_back(*opIt);
- return createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType());
+ return createAtomicOperation(node->getOp(), precision, resultType(), operands, node->getBasicType(), lvalueCoherentFlags);
}
}
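// Sketch (editorial, not part of the patch): image atomics first form an
// image texel pointer from (image, coord, sample) and pass it to
// createAtomicOperation() as the pointer operand; for non-multisampled images
// the sample index is the constant 0 built above with
// builder.makeUintConstant(0).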
-#ifdef AMD_EXTENSIONS
+#ifndef GLSLANG_WEB
// Check for fragment mask functions other than queries
if (cracked.fragMask) {
assert(sampler.ms);
@@ -4530,45 +4787,32 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
// Check for texture functions other than queries
bool sparse = node->isSparseTexture();
-#ifdef NV_EXTENSIONS
bool imageFootprint = node->isImageFootprint();
-#endif
-
- bool cubeCompare = sampler.dim == glslang::EsdCube && sampler.arrayed && sampler.shadow;
+ bool cubeCompare = sampler.dim == glslang::EsdCube && sampler.isArrayed() && sampler.isShadow();
// check for bias argument
bool bias = false;
-#ifdef AMD_EXTENSIONS
if (! cracked.lod && ! cracked.grad && ! cracked.fetch && ! cubeCompare) {
-#else
- if (! cracked.lod && ! cracked.gather && ! cracked.grad && ! cracked.fetch && ! cubeCompare) {
-#endif
int nonBiasArgCount = 2;
-#ifdef AMD_EXTENSIONS
if (cracked.gather)
++nonBiasArgCount; // comp argument should be present when bias argument is present
if (f16ShadowCompare)
++nonBiasArgCount;
-#endif
if (cracked.offset)
++nonBiasArgCount;
-#ifdef AMD_EXTENSIONS
else if (cracked.offsets)
++nonBiasArgCount;
-#endif
if (cracked.grad)
nonBiasArgCount += 2;
if (cracked.lodClamp)
++nonBiasArgCount;
if (sparse)
++nonBiasArgCount;
-#ifdef NV_EXTENSIONS
if (imageFootprint)
// Following three extra arguments:
// int granularity, bool coarse, out gl_TextureFootprint2DNV footprint
nonBiasArgCount += 3;
-#endif
if ((int)arguments.size() > nonBiasArgCount)
bias = true;
}
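// Sketch (editorial, not part of the patch): bias detection is purely
// positional — tally every argument the call must have without a bias
// (coord, dref/comp, offsets, gradients, lod clamp, sparse/footprint extras),
// and any surplus argument is the optional bias:
//   bias = (int)arguments.size() > nonBiasArgCount;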
@@ -4580,7 +4824,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
params.sampler = builder.createUnaryOp(spv::OpImage, builder.getImageType(params.sampler), params.sampler);
}
-#ifdef AMD_EXTENSIONS
+#ifndef GLSLANG_WEB
if (cracked.gather) {
const auto& sourceExtensions = glslangIntermediate->getRequestedExtensions();
if (bias || cracked.lod ||
@@ -4598,11 +4842,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
bool noImplicitLod = false;
// sort out where Dref is coming from
-#ifdef AMD_EXTENSIONS
if (cubeCompare || f16ShadowCompare) {
-#else
- if (cubeCompare) {
-#endif
params.Dref = arguments[2];
++extraArgs;
} else if (sampler.shadow && cracked.gather) {
@@ -4623,19 +4863,15 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
if (cracked.lod) {
params.lod = arguments[2 + extraArgs];
++extraArgs;
- } else if (glslangIntermediate->getStage() != EShLangFragment
-#ifdef NV_EXTENSIONS
- // NV_compute_shader_derivatives layout qualifiers allow for implicit LODs
- && !(glslangIntermediate->getStage() == EShLangCompute &&
- (glslangIntermediate->getLayoutDerivativeModeNone() != glslang::LayoutDerivativeNone))
-#endif
- ) {
+ } else if (glslangIntermediate->getStage() != EShLangFragment &&
+ !(glslangIntermediate->getStage() == EShLangCompute &&
+ glslangIntermediate->hasLayoutDerivativeModeNone())) {
// we need to invent a default LOD for the explicit-LOD instruction in a non-fragment stage
noImplicitLod = true;
}
// multisample
- if (sampler.ms) {
+ if (sampler.isMultiSample()) {
params.sample = arguments[2 + extraArgs]; // For MS, "sample" should be specified
++extraArgs;
}
@@ -4656,6 +4892,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
++extraArgs;
}
+#ifndef GLSLANG_WEB
// lod clamp
if (cracked.lodClamp) {
params.lodClamp = arguments[2 + extraArgs];
@@ -4666,7 +4903,6 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
params.texelOut = arguments[2 + extraArgs];
++extraArgs;
}
-
// gather component
if (cracked.gather && ! sampler.shadow) {
// default component is 0, if missing, otherwise an argument
@@ -4676,7 +4912,6 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
} else
params.component = builder.makeIntConstant(0);
}
-#ifdef NV_EXTENSIONS
spv::Id resultStruct = spv::NoResult;
if (imageFootprint) {
// Following three extra arguments:
@@ -4693,7 +4928,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
++extraArgs;
}
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
if (imageFootprint) {
builder.addExtension(spv::E_SPV_NV_shader_image_footprint);
builder.addCapability(spv::CapabilityImageFootprintNV);
@@ -4776,6 +5011,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
}
}
+#ifndef GLSLANG_WEB
// nonprivate
if (imageType.getQualifier().nonprivate) {
params.nonprivate = true;
@@ -4785,6 +5021,7 @@ spv::Id TGlslangToSpvTraverser::createImageTextureFunctionCall(glslang::TIntermO
if (imageType.getQualifier().volatil) {
params.volatil = true;
}
+#endif
std::vector<spv::Id> result( 1,
builder.createTextureCall(precision, resultType(), sparse, cracked.fetch, cracked.proj, cracked.gather,
@@ -5019,6 +5256,30 @@ spv::Id TGlslangToSpvTraverser::createBinaryOperation(glslang::TOperator op, OpD
binOp = spv::OpLogicalNotEqual;
break;
+ case glslang::EOpAbsDifference:
+ binOp = isUnsigned ? spv::OpAbsUSubINTEL : spv::OpAbsISubINTEL;
+ break;
+
+ case glslang::EOpAddSaturate:
+ binOp = isUnsigned ? spv::OpUAddSatINTEL : spv::OpIAddSatINTEL;
+ break;
+
+ case glslang::EOpSubSaturate:
+ binOp = isUnsigned ? spv::OpUSubSatINTEL : spv::OpISubSatINTEL;
+ break;
+
+ case glslang::EOpAverage:
+ binOp = isUnsigned ? spv::OpUAverageINTEL : spv::OpIAverageINTEL;
+ break;
+
+ case glslang::EOpAverageRounded:
+ binOp = isUnsigned ? spv::OpUAverageRoundedINTEL : spv::OpIAverageRoundedINTEL;
+ break;
+
+ case glslang::EOpMul32x16:
+ binOp = isUnsigned ? spv::OpUMul32x16INTEL : spv::OpIMul32x16INTEL;
+ break;
+
case glslang::EOpLessThan:
case glslang::EOpGreaterThan:
case glslang::EOpLessThanEqual:
@@ -5045,8 +5306,8 @@ spv::Id TGlslangToSpvTraverser::createBinaryOperation(glslang::TOperator op, OpD
builder.promoteScalar(decorations.precision, left, right);
spv::Id result = builder.createBinOp(binOp, typeId, left, right);
- builder.addDecoration(result, decorations.noContraction);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNoContraction(builder, result);
+ decorations.addNonUniform(builder, result);
return builder.setPrecision(result, decorations.precision);
}
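// Sketch (editorial, not part of the patch): addNoContraction()/addNonUniform()
// wrap the former builder.addDecoration() calls, presumably so the web build
// can compile the decoration logic out while call sites stay unchanged.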
@@ -5058,7 +5319,7 @@ spv::Id TGlslangToSpvTraverser::createBinaryOperation(glslang::TOperator op, OpD
if (reduceComparison && (op == glslang::EOpEqual || op == glslang::EOpNotEqual)
&& (builder.isVector(left) || builder.isMatrix(left) || builder.isAggregate(left))) {
spv::Id result = builder.createCompositeCompare(decorations.precision, left, right, op == glslang::EOpEqual);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNonUniform(builder, result);
return result;
}
@@ -5119,8 +5380,8 @@ spv::Id TGlslangToSpvTraverser::createBinaryOperation(glslang::TOperator op, OpD
if (binOp != spv::OpNop) {
spv::Id result = builder.createBinOp(binOp, typeId, left, right);
- builder.addDecoration(result, decorations.noContraction);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNoContraction(builder, result);
+ decorations.addNonUniform(builder, result);
return builder.setPrecision(result, decorations.precision);
}
@@ -5184,8 +5445,8 @@ spv::Id TGlslangToSpvTraverser::createBinaryMatrixOperation(spv::Op op, OpDecora
if (firstClass) {
spv::Id result = builder.createBinOp(op, typeId, left, right);
- builder.addDecoration(result, decorations.noContraction);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNoContraction(builder, result);
+ decorations.addNonUniform(builder, result);
return builder.setPrecision(result, decorations.precision);
}
@@ -5224,14 +5485,14 @@ spv::Id TGlslangToSpvTraverser::createBinaryMatrixOperation(spv::Op op, OpDecora
spv::Id leftVec = leftMat ? builder.createCompositeExtract( left, vecType, indexes) : smearVec;
spv::Id rightVec = rightMat ? builder.createCompositeExtract(right, vecType, indexes) : smearVec;
spv::Id result = builder.createBinOp(op, vecType, leftVec, rightVec);
- builder.addDecoration(result, decorations.noContraction);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNoContraction(builder, result);
+ decorations.addNonUniform(builder, result);
results.push_back(builder.setPrecision(result, decorations.precision));
}
// put the pieces together
spv::Id result = builder.setPrecision(builder.createCompositeConstruct(typeId, results), decorations.precision);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNonUniform(builder, result);
return result;
}
default:
@@ -5241,7 +5502,7 @@ spv::Id TGlslangToSpvTraverser::createBinaryMatrixOperation(spv::Op op, OpDecora
}
spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDecorations& decorations, spv::Id typeId,
- spv::Id operand, glslang::TBasicType typeProxy)
+ spv::Id operand, glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags)
{
spv::Op unaryOp = spv::OpNop;
int extBuiltins = -1;
@@ -5409,6 +5670,7 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
case glslang::EOpUnpackHalf2x16:
libCall = spv::GLSLstd450UnpackHalf2x16;
break;
+#ifndef GLSLANG_WEB
case glslang::EOpPackSnorm4x8:
libCall = spv::GLSLstd450PackSnorm4x8;
break;
@@ -5427,6 +5689,7 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
case glslang::EOpUnpackDouble2x32:
libCall = spv::GLSLstd450UnpackDouble2x32;
break;
+#endif
case glslang::EOpPackInt2x32:
case glslang::EOpUnpackInt2x32:
@@ -5460,6 +5723,28 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
case glslang::EOpFwidth:
unaryOp = spv::OpFwidth;
break;
+
+ case glslang::EOpAny:
+ unaryOp = spv::OpAny;
+ break;
+ case glslang::EOpAll:
+ unaryOp = spv::OpAll;
+ break;
+
+ case glslang::EOpAbs:
+ if (isFloat)
+ libCall = spv::GLSLstd450FAbs;
+ else
+ libCall = spv::GLSLstd450SAbs;
+ break;
+ case glslang::EOpSign:
+ if (isFloat)
+ libCall = spv::GLSLstd450FSign;
+ else
+ libCall = spv::GLSLstd450SSign;
+ break;
+
+#ifndef GLSLANG_WEB
case glslang::EOpDPdxFine:
unaryOp = spv::OpDPdxFine;
break;
@@ -5479,32 +5764,10 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
unaryOp = spv::OpFwidthCoarse;
break;
case glslang::EOpInterpolateAtCentroid:
-#ifdef AMD_EXTENSIONS
if (typeProxy == glslang::EbtFloat16)
builder.addExtension(spv::E_SPV_AMD_gpu_shader_half_float);
-#endif
libCall = spv::GLSLstd450InterpolateAtCentroid;
break;
- case glslang::EOpAny:
- unaryOp = spv::OpAny;
- break;
- case glslang::EOpAll:
- unaryOp = spv::OpAll;
- break;
-
- case glslang::EOpAbs:
- if (isFloat)
- libCall = spv::GLSLstd450FAbs;
- else
- libCall = spv::GLSLstd450SAbs;
- break;
- case glslang::EOpSign:
- if (isFloat)
- libCall = spv::GLSLstd450FSign;
- else
- libCall = spv::GLSLstd450SSign;
- break;
-
case glslang::EOpAtomicCounterIncrement:
case glslang::EOpAtomicCounterDecrement:
case glslang::EOpAtomicCounter:
@@ -5512,7 +5775,7 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
// Handle all of the atomics in one place, in createAtomicOperation()
std::vector<spv::Id> operands;
operands.push_back(operand);
- return createAtomicOperation(op, decorations.precision, typeId, operands, typeProxy);
+ return createAtomicOperation(op, decorations.precision, typeId, operands, typeProxy, lvalueCoherentFlags);
}
case glslang::EOpBitFieldReverse:
@@ -5531,12 +5794,23 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
libCall = spv::GLSLstd450FindSMsb;
break;
+ case glslang::EOpCountLeadingZeros:
+ builder.addCapability(spv::CapabilityIntegerFunctions2INTEL);
+ builder.addExtension("SPV_INTEL_shader_integer_functions2");
+ unaryOp = spv::OpUCountLeadingZerosINTEL;
+ break;
+
+ case glslang::EOpCountTrailingZeros:
+ builder.addCapability(spv::CapabilityIntegerFunctions2INTEL);
+ builder.addExtension("SPV_INTEL_shader_integer_functions2");
+ unaryOp = spv::OpUCountTrailingZerosINTEL;
+ break;
+
case glslang::EOpBallot:
case glslang::EOpReadFirstInvocation:
case glslang::EOpAnyInvocation:
case glslang::EOpAllInvocations:
case glslang::EOpAllInvocationsEqual:
-#ifdef AMD_EXTENSIONS
case glslang::EOpMinInvocations:
case glslang::EOpMaxInvocations:
case glslang::EOpAddInvocations:
@@ -5555,7 +5829,6 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
case glslang::EOpMinInvocationsExclusiveScanNonUniform:
case glslang::EOpMaxInvocationsExclusiveScanNonUniform:
case glslang::EOpAddInvocationsExclusiveScanNonUniform:
-#endif
{
std::vector<spv::Id> operands;
operands.push_back(operand);
@@ -5600,7 +5873,6 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
operands.push_back(operand);
return createSubgroupOperation(op, typeId, operands, typeProxy);
}
-#ifdef AMD_EXTENSIONS
case glslang::EOpMbcnt:
extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_ballot);
libCall = spv::MbcntAMD;
@@ -5615,15 +5887,13 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
extBuiltins = getExtBuiltins(spv::E_SPV_AMD_gcn_shader);
libCall = spv::CubeFaceCoordAMD;
break;
-#endif
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartition:
unaryOp = spv::OpGroupNonUniformPartitionNV;
break;
-#endif
case glslang::EOpConstructReference:
unaryOp = spv::OpBitcast;
break;
+#endif
case glslang::EOpCopyObject:
unaryOp = spv::OpCopyObject;
@@ -5642,8 +5912,8 @@ spv::Id TGlslangToSpvTraverser::createUnaryOperation(glslang::TOperator op, OpDe
id = builder.createUnaryOp(unaryOp, typeId, operand);
}
- builder.addDecoration(id, decorations.noContraction);
- builder.addDecoration(id, decorations.nonUniform);
+ decorations.addNoContraction(builder, id);
+ decorations.addNonUniform(builder, id);
return builder.setPrecision(id, decorations.precision);
}
@@ -5671,14 +5941,14 @@ spv::Id TGlslangToSpvTraverser::createUnaryMatrixOperation(spv::Op op, OpDecorat
indexes.push_back(c);
spv::Id srcVec = builder.createCompositeExtract(operand, srcVecType, indexes);
spv::Id destVec = builder.createUnaryOp(op, destVecType, srcVec);
- builder.addDecoration(destVec, decorations.noContraction);
- builder.addDecoration(destVec, decorations.nonUniform);
+ decorations.addNoContraction(builder, destVec);
+ decorations.addNonUniform(builder, destVec);
results.push_back(builder.setPrecision(destVec, decorations.precision));
}
// put the pieces together
spv::Id result = builder.setPrecision(builder.createCompositeConstruct(typeId, results), decorations.precision);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNonUniform(builder, result);
return result;
}
@@ -5770,110 +6040,49 @@ spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, OpDecora
int vectorSize = builder.isVectorType(destType) ? builder.getNumTypeComponents(destType) : 0;
switch (op) {
- case glslang::EOpConvInt8ToBool:
- case glslang::EOpConvUint8ToBool:
- zero = builder.makeUint8Constant(0);
- zero = makeSmearedConstant(zero, vectorSize);
- return builder.createBinOp(spv::OpINotEqual, destType, operand, zero);
- case glslang::EOpConvInt16ToBool:
- case glslang::EOpConvUint16ToBool:
- zero = builder.makeUint16Constant(0);
- zero = makeSmearedConstant(zero, vectorSize);
- return builder.createBinOp(spv::OpINotEqual, destType, operand, zero);
case glslang::EOpConvIntToBool:
case glslang::EOpConvUintToBool:
zero = builder.makeUintConstant(0);
zero = makeSmearedConstant(zero, vectorSize);
return builder.createBinOp(spv::OpINotEqual, destType, operand, zero);
- case glslang::EOpConvInt64ToBool:
- case glslang::EOpConvUint64ToBool:
- zero = builder.makeUint64Constant(0);
- zero = makeSmearedConstant(zero, vectorSize);
- return builder.createBinOp(spv::OpINotEqual, destType, operand, zero);
-
case glslang::EOpConvFloatToBool:
zero = builder.makeFloatConstant(0.0F);
zero = makeSmearedConstant(zero, vectorSize);
return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero);
-
- case glslang::EOpConvDoubleToBool:
- zero = builder.makeDoubleConstant(0.0);
- zero = makeSmearedConstant(zero, vectorSize);
- return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero);
-
- case glslang::EOpConvFloat16ToBool:
- zero = builder.makeFloat16Constant(0.0F);
- zero = makeSmearedConstant(zero, vectorSize);
- return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero);
-
case glslang::EOpConvBoolToFloat:
convOp = spv::OpSelect;
zero = builder.makeFloatConstant(0.0F);
one = builder.makeFloatConstant(1.0F);
break;
- case glslang::EOpConvBoolToDouble:
- convOp = spv::OpSelect;
- zero = builder.makeDoubleConstant(0.0);
- one = builder.makeDoubleConstant(1.0);
- break;
-
- case glslang::EOpConvBoolToFloat16:
- convOp = spv::OpSelect;
- zero = builder.makeFloat16Constant(0.0F);
- one = builder.makeFloat16Constant(1.0F);
- break;
-
- case glslang::EOpConvBoolToInt8:
- zero = builder.makeInt8Constant(0);
- one = builder.makeInt8Constant(1);
- convOp = spv::OpSelect;
- break;
-
- case glslang::EOpConvBoolToUint8:
- zero = builder.makeUint8Constant(0);
- one = builder.makeUint8Constant(1);
- convOp = spv::OpSelect;
- break;
-
- case glslang::EOpConvBoolToInt16:
- zero = builder.makeInt16Constant(0);
- one = builder.makeInt16Constant(1);
- convOp = spv::OpSelect;
- break;
-
- case glslang::EOpConvBoolToUint16:
- zero = builder.makeUint16Constant(0);
- one = builder.makeUint16Constant(1);
- convOp = spv::OpSelect;
- break;
-
case glslang::EOpConvBoolToInt:
case glslang::EOpConvBoolToInt64:
- if (op == glslang::EOpConvBoolToInt64)
+#ifndef GLSLANG_WEB
+ if (op == glslang::EOpConvBoolToInt64) {
zero = builder.makeInt64Constant(0);
- else
- zero = builder.makeIntConstant(0);
-
- if (op == glslang::EOpConvBoolToInt64)
one = builder.makeInt64Constant(1);
- else
+ } else
+#endif
+ {
+ zero = builder.makeIntConstant(0);
one = builder.makeIntConstant(1);
+ }
convOp = spv::OpSelect;
break;
case glslang::EOpConvBoolToUint:
case glslang::EOpConvBoolToUint64:
- if (op == glslang::EOpConvBoolToUint64)
+#ifndef GLSLANG_WEB
+ if (op == glslang::EOpConvBoolToUint64) {
zero = builder.makeUint64Constant(0);
- else
- zero = builder.makeUintConstant(0);
-
- if (op == glslang::EOpConvBoolToUint64)
one = builder.makeUint64Constant(1);
- else
+ } else
+#endif
+ {
+ zero = builder.makeUintConstant(0);
one = builder.makeUintConstant(1);
+ }
convOp = spv::OpSelect;
break;
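// Sketch (editorial, not part of the patch): every bool -> number conversion
// above lowers to an OpSelect over width-matched constants, conceptually:
//   result = builder.createTriOp(spv::OpSelect, destType, operand, one, zero);
// which is why only the 'zero'/'one' constants differ per destination type.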
@@ -5908,17 +6117,6 @@ spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, OpDecora
convOp = spv::OpConvertUToF;
break;
- case glslang::EOpConvDoubleToFloat:
- case glslang::EOpConvFloatToDouble:
- case glslang::EOpConvDoubleToFloat16:
- case glslang::EOpConvFloat16ToDouble:
- case glslang::EOpConvFloatToFloat16:
- case glslang::EOpConvFloat16ToFloat:
- convOp = spv::OpFConvert;
- if (builder.isMatrixType(destType))
- return createUnaryMatrixOperation(convOp, decorations, destType, operand, typeProxy);
- break;
-
case glslang::EOpConvFloat16ToInt8:
case glslang::EOpConvFloatToInt8:
case glslang::EOpConvDoubleToInt8:
@@ -5944,13 +6142,16 @@ spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, OpDecora
case glslang::EOpConvInt64ToUint64:
if (builder.isInSpecConstCodeGenMode()) {
// Build zero scalar or vector for OpIAdd.
+#ifndef GLSLANG_WEB
if(op == glslang::EOpConvUint8ToInt8 || op == glslang::EOpConvInt8ToUint8) {
zero = builder.makeUint8Constant(0);
} else if (op == glslang::EOpConvUint16ToInt16 || op == glslang::EOpConvInt16ToUint16) {
zero = builder.makeUint16Constant(0);
} else if (op == glslang::EOpConvUint64ToInt64 || op == glslang::EOpConvInt64ToUint64) {
zero = builder.makeUint64Constant(0);
- } else {
+ } else
+#endif
+ {
zero = builder.makeUintConstant(0);
}
zero = makeSmearedConstant(zero, vectorSize);
@@ -5977,6 +6178,71 @@ spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, OpDecora
convOp = spv::OpConvertFToU;
break;
+#ifndef GLSLANG_WEB
+ case glslang::EOpConvInt8ToBool:
+ case glslang::EOpConvUint8ToBool:
+ zero = builder.makeUint8Constant(0);
+ zero = makeSmearedConstant(zero, vectorSize);
+ return builder.createBinOp(spv::OpINotEqual, destType, operand, zero);
+ case glslang::EOpConvInt16ToBool:
+ case glslang::EOpConvUint16ToBool:
+ zero = builder.makeUint16Constant(0);
+ zero = makeSmearedConstant(zero, vectorSize);
+ return builder.createBinOp(spv::OpINotEqual, destType, operand, zero);
+ case glslang::EOpConvInt64ToBool:
+ case glslang::EOpConvUint64ToBool:
+ zero = builder.makeUint64Constant(0);
+ zero = makeSmearedConstant(zero, vectorSize);
+ return builder.createBinOp(spv::OpINotEqual, destType, operand, zero);
+ case glslang::EOpConvDoubleToBool:
+ zero = builder.makeDoubleConstant(0.0);
+ zero = makeSmearedConstant(zero, vectorSize);
+ return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero);
+ case glslang::EOpConvFloat16ToBool:
+ zero = builder.makeFloat16Constant(0.0F);
+ zero = makeSmearedConstant(zero, vectorSize);
+ return builder.createBinOp(spv::OpFOrdNotEqual, destType, operand, zero);
+ case glslang::EOpConvBoolToDouble:
+ convOp = spv::OpSelect;
+ zero = builder.makeDoubleConstant(0.0);
+ one = builder.makeDoubleConstant(1.0);
+ break;
+ case glslang::EOpConvBoolToFloat16:
+ convOp = spv::OpSelect;
+ zero = builder.makeFloat16Constant(0.0F);
+ one = builder.makeFloat16Constant(1.0F);
+ break;
+ case glslang::EOpConvBoolToInt8:
+ zero = builder.makeInt8Constant(0);
+ one = builder.makeInt8Constant(1);
+ convOp = spv::OpSelect;
+ break;
+ case glslang::EOpConvBoolToUint8:
+ zero = builder.makeUint8Constant(0);
+ one = builder.makeUint8Constant(1);
+ convOp = spv::OpSelect;
+ break;
+ case glslang::EOpConvBoolToInt16:
+ zero = builder.makeInt16Constant(0);
+ one = builder.makeInt16Constant(1);
+ convOp = spv::OpSelect;
+ break;
+ case glslang::EOpConvBoolToUint16:
+ zero = builder.makeUint16Constant(0);
+ one = builder.makeUint16Constant(1);
+ convOp = spv::OpSelect;
+ break;
+ case glslang::EOpConvDoubleToFloat:
+ case glslang::EOpConvFloatToDouble:
+ case glslang::EOpConvDoubleToFloat16:
+ case glslang::EOpConvFloat16ToDouble:
+ case glslang::EOpConvFloatToFloat16:
+ case glslang::EOpConvFloat16ToFloat:
+ convOp = spv::OpFConvert;
+ if (builder.isMatrixType(destType))
+ return createUnaryMatrixOperation(convOp, decorations, destType, operand, typeProxy);
+ break;
+
case glslang::EOpConvInt8ToInt16:
case glslang::EOpConvInt8ToInt:
case glslang::EOpConvInt8ToInt64:
@@ -6087,6 +6353,15 @@ spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, OpDecora
case glslang::EOpConvPtrToUint64:
convOp = spv::OpConvertPtrToU;
break;
+ case glslang::EOpConvPtrToUvec2:
+ case glslang::EOpConvUvec2ToPtr:
+ if (builder.isVector(operand))
+ builder.promoteIncorporatedExtension(spv::E_SPV_EXT_physical_storage_buffer,
+ spv::E_SPV_KHR_physical_storage_buffer, spv::Spv_1_5);
+ convOp = spv::OpBitcast;
+ break;
+#endif
+
default:
break;
}
@@ -6103,7 +6378,7 @@ spv::Id TGlslangToSpvTraverser::createConversion(glslang::TOperator op, OpDecora
result = builder.createUnaryOp(convOp, destType, operand);
result = builder.setPrecision(result, decorations.precision);
- builder.addDecoration(result, decorations.nonUniform);
+ decorations.addNonUniform(builder, result);
return result;
}
@@ -6120,7 +6395,7 @@ spv::Id TGlslangToSpvTraverser::makeSmearedConstant(spv::Id constant, int vector
}
// For glslang ops that map to SPV atomic opCodes
-spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv::Decoration /*precision*/, spv::Id typeId, std::vector<spv::Id>& operands, glslang::TBasicType typeProxy)
+spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv::Decoration /*precision*/, spv::Id typeId, std::vector<spv::Id>& operands, glslang::TBasicType typeProxy, const spv::Builder::AccessChain::CoherentFlags &lvalueCoherentFlags)
{
spv::Op opCode = spv::OpNop;
@@ -6206,7 +6481,9 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv
scopeId = builder.makeUintConstant(spv::ScopeDevice);
}
// semantics default to relaxed
- spv::Id semanticsId = builder.makeUintConstant(spv::MemorySemanticsMaskNone);
+ spv::Id semanticsId = builder.makeUintConstant(lvalueCoherentFlags.isVolatile() && glslangIntermediate->usingVulkanMemoryModel() ?
+ spv::MemorySemanticsVolatileMask :
+ spv::MemorySemanticsMaskNone);
spv::Id semanticsId2 = semanticsId;
pointerId = operands[0];
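// Sketch of the effect, assuming the Vulkan memory model and a volatile
// l-value: the atomic is emitted with Volatile semantics instead of relaxed,
//
//     OpAtomicIAdd %type %ptr %scope %semanticsVolatile %value
//
// and the capability check below then sees the new Volatile bit and requires
// CapabilityVulkanMemoryModelKHR.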
@@ -6236,7 +6513,10 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv
// Check for capabilities
unsigned semanticsImmediate = builder.getConstantScalar(semanticsId) | builder.getConstantScalar(semanticsId2);
- if (semanticsImmediate & (spv::MemorySemanticsMakeAvailableKHRMask | spv::MemorySemanticsMakeVisibleKHRMask | spv::MemorySemanticsOutputMemoryKHRMask)) {
+ if (semanticsImmediate & (spv::MemorySemanticsMakeAvailableKHRMask |
+ spv::MemorySemanticsMakeVisibleKHRMask |
+ spv::MemorySemanticsOutputMemoryKHRMask |
+ spv::MemorySemanticsVolatileMask)) {
builder.addCapability(spv::CapabilityVulkanMemoryModelKHR);
}
@@ -6274,10 +6554,8 @@ spv::Id TGlslangToSpvTraverser::createAtomicOperation(glslang::TOperator op, spv
// Create group invocation operations.
spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op, spv::Id typeId, std::vector<spv::Id>& operands, glslang::TBasicType typeProxy)
{
-#ifdef AMD_EXTENSIONS
bool isUnsigned = isTypeUnsignedInt(typeProxy);
bool isFloat = isTypeFloat(typeProxy);
-#endif
spv::Op opCode = spv::OpNop;
std::vector<spv::IdImmediate> spvGroupOperands;
@@ -6294,7 +6572,6 @@ spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op
builder.addCapability(spv::CapabilitySubgroupVoteKHR);
} else {
builder.addCapability(spv::CapabilityGroups);
-#ifdef AMD_EXTENSIONS
if (op == glslang::EOpMinInvocationsNonUniform ||
op == glslang::EOpMaxInvocationsNonUniform ||
op == glslang::EOpAddInvocationsNonUniform ||
@@ -6305,9 +6582,7 @@ spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op
op == glslang::EOpMaxInvocationsExclusiveScanNonUniform ||
op == glslang::EOpAddInvocationsExclusiveScanNonUniform)
builder.addExtension(spv::E_SPV_AMD_shader_ballot);
-#endif
-#ifdef AMD_EXTENSIONS
switch (op) {
case glslang::EOpMinInvocations:
case glslang::EOpMaxInvocations:
@@ -6342,7 +6617,6 @@ spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op
spv::IdImmediate groupOp = { false, (unsigned)groupOperation };
spvGroupOperands.push_back(groupOp);
}
-#endif
}
for (auto opIt = operands.begin(); opIt != operands.end(); ++opIt) {
@@ -6389,7 +6663,6 @@ spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op
builder.createCompositeConstruct(uvec2Type, components));
}
-#ifdef AMD_EXTENSIONS
case glslang::EOpMinInvocations:
case glslang::EOpMaxInvocations:
case glslang::EOpAddInvocations:
@@ -6476,7 +6749,6 @@ spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op
return CreateInvocationsVectorOperation(opCode, groupOperation, typeId, operands);
break;
-#endif
default:
logger->missingFunctionality("invocation operation");
return spv::NoResult;
@@ -6490,7 +6762,6 @@ spv::Id TGlslangToSpvTraverser::createInvocationsOperation(glslang::TOperator op
spv::Id TGlslangToSpvTraverser::CreateInvocationsVectorOperation(spv::Op op, spv::GroupOperation groupOperation,
spv::Id typeId, std::vector<spv::Id>& operands)
{
-#ifdef AMD_EXTENSIONS
assert(op == spv::OpGroupFMin || op == spv::OpGroupUMin || op == spv::OpGroupSMin ||
op == spv::OpGroupFMax || op == spv::OpGroupUMax || op == spv::OpGroupSMax ||
op == spv::OpGroupFAdd || op == spv::OpGroupIAdd || op == spv::OpGroupBroadcast ||
@@ -6498,12 +6769,6 @@ spv::Id TGlslangToSpvTraverser::CreateInvocationsVectorOperation(spv::Op op, spv
op == spv::OpGroupFMinNonUniformAMD || op == spv::OpGroupUMinNonUniformAMD || op == spv::OpGroupSMinNonUniformAMD ||
op == spv::OpGroupFMaxNonUniformAMD || op == spv::OpGroupUMaxNonUniformAMD || op == spv::OpGroupSMaxNonUniformAMD ||
op == spv::OpGroupFAddNonUniformAMD || op == spv::OpGroupIAddNonUniformAMD);
-#else
- assert(op == spv::OpGroupFMin || op == spv::OpGroupUMin || op == spv::OpGroupSMin ||
- op == spv::OpGroupFMax || op == spv::OpGroupUMax || op == spv::OpGroupSMax ||
- op == spv::OpGroupFAdd || op == spv::OpGroupIAdd || op == spv::OpGroupBroadcast ||
- op == spv::OpSubgroupReadInvocationKHR);
-#endif
// Handle group invocation operations scalar by scalar.
// The result type is the same type as the original type.
@@ -6627,7 +6892,6 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
builder.addCapability(spv::CapabilityGroupNonUniform);
builder.addCapability(spv::CapabilityGroupNonUniformQuad);
break;
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedAdd:
case glslang::EOpSubgroupPartitionedMul:
case glslang::EOpSubgroupPartitionedMin:
@@ -6652,12 +6916,12 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
builder.addExtension(spv::E_SPV_NV_shader_subgroup_partitioned);
builder.addCapability(spv::CapabilityGroupNonUniformPartitionedNV);
break;
-#endif
default: assert(0 && "Unhandled subgroup operation!");
}
- const bool isUnsigned = typeProxy == glslang::EbtUint || typeProxy == glslang::EbtUint64;
- const bool isFloat = typeProxy == glslang::EbtFloat || typeProxy == glslang::EbtDouble;
+
+ const bool isUnsigned = isTypeUnsignedInt(typeProxy);
+ const bool isFloat = isTypeFloat(typeProxy);
const bool isBool = typeProxy == glslang::EbtBool;
spv::Op opCode = spv::OpNop;
@@ -6686,11 +6950,9 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupInclusiveAdd:
case glslang::EOpSubgroupExclusiveAdd:
case glslang::EOpSubgroupClusteredAdd:
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedAdd:
case glslang::EOpSubgroupPartitionedInclusiveAdd:
case glslang::EOpSubgroupPartitionedExclusiveAdd:
-#endif
if (isFloat) {
opCode = spv::OpGroupNonUniformFAdd;
} else {
@@ -6701,11 +6963,9 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupInclusiveMul:
case glslang::EOpSubgroupExclusiveMul:
case glslang::EOpSubgroupClusteredMul:
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedMul:
case glslang::EOpSubgroupPartitionedInclusiveMul:
case glslang::EOpSubgroupPartitionedExclusiveMul:
-#endif
if (isFloat) {
opCode = spv::OpGroupNonUniformFMul;
} else {
@@ -6716,11 +6976,9 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupInclusiveMin:
case glslang::EOpSubgroupExclusiveMin:
case glslang::EOpSubgroupClusteredMin:
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedMin:
case glslang::EOpSubgroupPartitionedInclusiveMin:
case glslang::EOpSubgroupPartitionedExclusiveMin:
-#endif
if (isFloat) {
opCode = spv::OpGroupNonUniformFMin;
} else if (isUnsigned) {
@@ -6733,11 +6991,9 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupInclusiveMax:
case glslang::EOpSubgroupExclusiveMax:
case glslang::EOpSubgroupClusteredMax:
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedMax:
case glslang::EOpSubgroupPartitionedInclusiveMax:
case glslang::EOpSubgroupPartitionedExclusiveMax:
-#endif
if (isFloat) {
opCode = spv::OpGroupNonUniformFMax;
} else if (isUnsigned) {
@@ -6750,11 +7006,9 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupInclusiveAnd:
case glslang::EOpSubgroupExclusiveAnd:
case glslang::EOpSubgroupClusteredAnd:
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedAnd:
case glslang::EOpSubgroupPartitionedInclusiveAnd:
case glslang::EOpSubgroupPartitionedExclusiveAnd:
-#endif
if (isBool) {
opCode = spv::OpGroupNonUniformLogicalAnd;
} else {
@@ -6765,11 +7019,9 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupInclusiveOr:
case glslang::EOpSubgroupExclusiveOr:
case glslang::EOpSubgroupClusteredOr:
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedOr:
case glslang::EOpSubgroupPartitionedInclusiveOr:
case glslang::EOpSubgroupPartitionedExclusiveOr:
-#endif
if (isBool) {
opCode = spv::OpGroupNonUniformLogicalOr;
} else {
@@ -6780,11 +7032,9 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupInclusiveXor:
case glslang::EOpSubgroupExclusiveXor:
case glslang::EOpSubgroupClusteredXor:
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedXor:
case glslang::EOpSubgroupPartitionedInclusiveXor:
case glslang::EOpSubgroupPartitionedExclusiveXor:
-#endif
if (isBool) {
opCode = spv::OpGroupNonUniformLogicalXor;
} else {
@@ -6842,7 +7092,6 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupClusteredXor:
groupOperation = spv::GroupOperationClusteredReduce;
break;
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedAdd:
case glslang::EOpSubgroupPartitionedMul:
case glslang::EOpSubgroupPartitionedMin:
@@ -6870,7 +7119,6 @@ spv::Id TGlslangToSpvTraverser::createSubgroupOperation(glslang::TOperator op, s
case glslang::EOpSubgroupPartitionedExclusiveXor:
groupOperation = spv::GroupOperationPartitionedExclusiveScanNV;
break;
-#endif
}
// build the instruction
@@ -6928,7 +7176,7 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
switch (op) {
case glslang::EOpMin:
if (isFloat)
- libCall = spv::GLSLstd450FMin;
+ libCall = nanMinMaxClamp ? spv::GLSLstd450NMin : spv::GLSLstd450FMin;
else if (isUnsigned)
libCall = spv::GLSLstd450UMin;
else
@@ -6940,7 +7188,7 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
break;
case glslang::EOpMax:
if (isFloat)
- libCall = spv::GLSLstd450FMax;
+ libCall = nanMinMaxClamp ? spv::GLSLstd450NMax : spv::GLSLstd450FMax;
else if (isUnsigned)
libCall = spv::GLSLstd450UMax;
else
@@ -6959,7 +7207,7 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
case glslang::EOpClamp:
if (isFloat)
- libCall = spv::GLSLstd450FClamp;
+ libCall = nanMinMaxClamp ? spv::GLSLstd450NClamp : spv::GLSLstd450FClamp;
else if (isUnsigned)
libCall = spv::GLSLstd450UClamp;
else
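// The N-variants differ from the F-variants only in NaN handling, per the
// GLSL.std.450 spec: NMin/NMax/NClamp return the other operand when exactly
// one input is NaN, e.g.
//
//     NMin(x, NaN) == x,  NMin(NaN, y) == y,  NMin(NaN, NaN) == NaN
//
// whereas the FMin/FMax/FClamp result is undefined for NaN inputs. The
// nanMinMaxClamp flag opts generated code into the stricter behavior.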
@@ -7002,18 +7250,57 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
case glslang::EOpRefract:
libCall = spv::GLSLstd450Refract;
break;
+ case glslang::EOpBarrier:
+ {
+ // This is for the extended controlBarrier function, with four operands.
+ // The unextended barrier() goes through createNoArgOperation.
+ assert(operands.size() == 4);
+ unsigned int executionScope = builder.getConstantScalar(operands[0]);
+ unsigned int memoryScope = builder.getConstantScalar(operands[1]);
+ unsigned int semantics = builder.getConstantScalar(operands[2]) | builder.getConstantScalar(operands[3]);
+ builder.createControlBarrier((spv::Scope)executionScope, (spv::Scope)memoryScope, (spv::MemorySemanticsMask)semantics);
+ if (semantics & (spv::MemorySemanticsMakeAvailableKHRMask |
+ spv::MemorySemanticsMakeVisibleKHRMask |
+ spv::MemorySemanticsOutputMemoryKHRMask |
+ spv::MemorySemanticsVolatileMask)) {
+ builder.addCapability(spv::CapabilityVulkanMemoryModelKHR);
+ }
+ if (glslangIntermediate->usingVulkanMemoryModel() && (executionScope == spv::ScopeDevice || memoryScope == spv::ScopeDevice)) {
+ builder.addCapability(spv::CapabilityVulkanMemoryModelDeviceScopeKHR);
+ }
+ return 0;
+ }
+ break;
+ case glslang::EOpMemoryBarrier:
+ {
+ // This is for the extended memoryBarrier function, with three operands.
+ // The unextended memoryBarrier() goes through createNoArgOperation.
+ assert(operands.size() == 3);
+ unsigned int memoryScope = builder.getConstantScalar(operands[0]);
+ unsigned int semantics = builder.getConstantScalar(operands[1]) | builder.getConstantScalar(operands[2]);
+ builder.createMemoryBarrier((spv::Scope)memoryScope, (spv::MemorySemanticsMask)semantics);
+ if (semantics & (spv::MemorySemanticsMakeAvailableKHRMask |
+ spv::MemorySemanticsMakeVisibleKHRMask |
+ spv::MemorySemanticsOutputMemoryKHRMask |
+ spv::MemorySemanticsVolatileMask)) {
+ builder.addCapability(spv::CapabilityVulkanMemoryModelKHR);
+ }
+ if (glslangIntermediate->usingVulkanMemoryModel() && memoryScope == spv::ScopeDevice) {
+ builder.addCapability(spv::CapabilityVulkanMemoryModelDeviceScopeKHR);
+ }
+ return 0;
+ }
+ break;
+
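// For reference, this four-operand form backs the GL_KHR_memory_scope_semantics
// controlBarrier()/memoryBarrier() overloads, e.g. (GLSL sketch):
//
//     controlBarrier(gl_ScopeWorkgroup, gl_ScopeWorkgroup,
//                    gl_StorageSemanticsShared, gl_SemanticsAcquireRelease);
//
// All arguments must be constant integers, since they are read back with
// builder.getConstantScalar() before emission.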
+#ifndef GLSLANG_WEB
case glslang::EOpInterpolateAtSample:
-#ifdef AMD_EXTENSIONS
if (typeProxy == glslang::EbtFloat16)
builder.addExtension(spv::E_SPV_AMD_gpu_shader_half_float);
-#endif
libCall = spv::GLSLstd450InterpolateAtSample;
break;
case glslang::EOpInterpolateAtOffset:
-#ifdef AMD_EXTENSIONS
if (typeProxy == glslang::EbtFloat16)
builder.addExtension(spv::E_SPV_AMD_gpu_shader_half_float);
-#endif
libCall = spv::GLSLstd450InterpolateAtOffset;
break;
case glslang::EOpAddCarry:
@@ -7055,11 +7342,9 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
assert(builder.isPointerType(typeId1));
typeId1 = builder.getContainedTypeId(typeId1);
int width = builder.getScalarTypeWidth(typeId1);
-#ifdef AMD_EXTENSIONS
if (width == 16)
// Using 16-bit exp operand, enable extension SPV_AMD_gpu_shader_int16
builder.addExtension(spv::E_SPV_AMD_gpu_shader_int16);
-#endif
if (builder.getNumComponents(operands[0]) == 1)
frexpIntType = builder.makeIntegerType(width, true);
else
@@ -7089,7 +7374,6 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
case glslang::EOpSubgroupClusteredOr:
case glslang::EOpSubgroupClusteredXor:
case glslang::EOpSubgroupQuadBroadcast:
-#ifdef NV_EXTENSIONS
case glslang::EOpSubgroupPartitionedAdd:
case glslang::EOpSubgroupPartitionedMul:
case glslang::EOpSubgroupPartitionedMin:
@@ -7111,10 +7395,8 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
case glslang::EOpSubgroupPartitionedExclusiveAnd:
case glslang::EOpSubgroupPartitionedExclusiveOr:
case glslang::EOpSubgroupPartitionedExclusiveXor:
-#endif
return createSubgroupOperation(op, typeId, operands, typeProxy);
-#ifdef AMD_EXTENSIONS
case glslang::EOpSwizzleInvocations:
extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_ballot);
libCall = spv::SwizzleInvocationsAMD;
@@ -7168,44 +7450,7 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
extBuiltins = getExtBuiltins(spv::E_SPV_AMD_shader_explicit_vertex_parameter);
libCall = spv::InterpolateAtVertexAMD;
break;
-#endif
- case glslang::EOpBarrier:
- {
- // This is for the extended controlBarrier function, with four operands.
- // The unextended barrier() goes through createNoArgOperation.
- assert(operands.size() == 4);
- unsigned int executionScope = builder.getConstantScalar(operands[0]);
- unsigned int memoryScope = builder.getConstantScalar(operands[1]);
- unsigned int semantics = builder.getConstantScalar(operands[2]) | builder.getConstantScalar(operands[3]);
- builder.createControlBarrier((spv::Scope)executionScope, (spv::Scope)memoryScope, (spv::MemorySemanticsMask)semantics);
- if (semantics & (spv::MemorySemanticsMakeAvailableKHRMask | spv::MemorySemanticsMakeVisibleKHRMask | spv::MemorySemanticsOutputMemoryKHRMask)) {
- builder.addCapability(spv::CapabilityVulkanMemoryModelKHR);
- }
- if (glslangIntermediate->usingVulkanMemoryModel() && (executionScope == spv::ScopeDevice || memoryScope == spv::ScopeDevice)) {
- builder.addCapability(spv::CapabilityVulkanMemoryModelDeviceScopeKHR);
- }
- return 0;
- }
- break;
- case glslang::EOpMemoryBarrier:
- {
- // This is for the extended memoryBarrier function, with three operands.
- // The unextended memoryBarrier() goes through createNoArgOperation.
- assert(operands.size() == 3);
- unsigned int memoryScope = builder.getConstantScalar(operands[0]);
- unsigned int semantics = builder.getConstantScalar(operands[1]) | builder.getConstantScalar(operands[2]);
- builder.createMemoryBarrier((spv::Scope)memoryScope, (spv::MemorySemanticsMask)semantics);
- if (semantics & (spv::MemorySemanticsMakeAvailableKHRMask | spv::MemorySemanticsMakeVisibleKHRMask | spv::MemorySemanticsOutputMemoryKHRMask)) {
- builder.addCapability(spv::CapabilityVulkanMemoryModelKHR);
- }
- if (glslangIntermediate->usingVulkanMemoryModel() && memoryScope == spv::ScopeDevice) {
- builder.addCapability(spv::CapabilityVulkanMemoryModelDeviceScopeKHR);
- }
- return 0;
- }
- break;
-#ifdef NV_EXTENSIONS
case glslang::EOpReportIntersectionNV:
{
typeId = builder.makeBoolType();
@@ -7227,11 +7472,10 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
case glslang::EOpWritePackedPrimitiveIndices4x8NV:
builder.createNoResultOp(spv::OpWritePackedPrimitiveIndices4x8NV, operands);
return 0;
-#endif
case glslang::EOpCooperativeMatrixMulAdd:
opCode = spv::OpCooperativeMatrixMulAddNV;
break;
-
+#endif // GLSLANG_WEB
default:
return 0;
}
@@ -7252,7 +7496,7 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
id = builder.createCompositeExtract(mulOp, typeId, 0);
for (int i = 1; i < componentCount; ++i) {
builder.setPrecision(id, precision);
- id = builder.createBinOp(spv::OpIAdd, typeId, id, builder.createCompositeExtract(operands[0], typeId, i));
+ id = builder.createBinOp(spv::OpIAdd, typeId, id, builder.createCompositeExtract(mulOp, typeId, i));
}
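// The one-line change above fixes the scalar expansion of the dot product:
// the accumulator must sum components of the element-wise product mulOp,
// i.e. a[0]*b[0] + a[1]*b[1] + ..., whereas the old code re-read operands[0]
// and effectively computed a[0]*b[0] + a[1] + a[2] + ...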
} else {
switch (consumedOperands) {
@@ -7275,6 +7519,7 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
}
}
+#ifndef GLSLANG_WEB
// Decode the return types that were structures
switch (op) {
case glslang::EOpAddCarry:
@@ -7304,6 +7549,7 @@ spv::Id TGlslangToSpvTraverser::createMiscOperation(glslang::TOperator op, spv::
default:
break;
}
+#endif
return builder.setPrecision(id, precision);
}
@@ -7315,12 +7561,6 @@ spv::Id TGlslangToSpvTraverser::createNoArgOperation(glslang::TOperator op, spv:
spv::Scope memoryBarrierScope = glslangIntermediate->usingVulkanMemoryModel() ? spv::ScopeQueueFamilyKHR : spv::ScopeDevice;
switch (op) {
- case glslang::EOpEmitVertex:
- builder.createNoResultOp(spv::OpEmitVertex);
- return 0;
- case glslang::EOpEndPrimitive:
- builder.createNoResultOp(spv::OpEndPrimitive);
- return 0;
case glslang::EOpBarrier:
if (glslangIntermediate->getStage() == EShLangTessControl) {
if (glslangIntermediate->usingVulkanMemoryModel()) {
@@ -7341,18 +7581,10 @@ spv::Id TGlslangToSpvTraverser::createNoArgOperation(glslang::TOperator op, spv:
builder.createMemoryBarrier(memoryBarrierScope, spv::MemorySemanticsAllMemory |
spv::MemorySemanticsAcquireReleaseMask);
return 0;
- case glslang::EOpMemoryBarrierAtomicCounter:
- builder.createMemoryBarrier(memoryBarrierScope, spv::MemorySemanticsAtomicCounterMemoryMask |
- spv::MemorySemanticsAcquireReleaseMask);
- return 0;
case glslang::EOpMemoryBarrierBuffer:
builder.createMemoryBarrier(memoryBarrierScope, spv::MemorySemanticsUniformMemoryMask |
spv::MemorySemanticsAcquireReleaseMask);
return 0;
- case glslang::EOpMemoryBarrierImage:
- builder.createMemoryBarrier(memoryBarrierScope, spv::MemorySemanticsImageMemoryMask |
- spv::MemorySemanticsAcquireReleaseMask);
- return 0;
case glslang::EOpMemoryBarrierShared:
builder.createMemoryBarrier(memoryBarrierScope, spv::MemorySemanticsWorkgroupMemoryMask |
spv::MemorySemanticsAcquireReleaseMask);
@@ -7361,6 +7593,15 @@ spv::Id TGlslangToSpvTraverser::createNoArgOperation(glslang::TOperator op, spv:
builder.createMemoryBarrier(spv::ScopeWorkgroup, spv::MemorySemanticsAllMemory |
spv::MemorySemanticsAcquireReleaseMask);
return 0;
+#ifndef GLSLANG_WEB
+ case glslang::EOpMemoryBarrierAtomicCounter:
+ builder.createMemoryBarrier(memoryBarrierScope, spv::MemorySemanticsAtomicCounterMemoryMask |
+ spv::MemorySemanticsAcquireReleaseMask);
+ return 0;
+ case glslang::EOpMemoryBarrierImage:
+ builder.createMemoryBarrier(memoryBarrierScope, spv::MemorySemanticsImageMemoryMask |
+ spv::MemorySemanticsAcquireReleaseMask);
+ return 0;
case glslang::EOpAllMemoryBarrierWithGroupSync:
builder.createControlBarrier(spv::ScopeWorkgroup, spv::ScopeDevice,
spv::MemorySemanticsAllMemory |
@@ -7405,30 +7646,69 @@ spv::Id TGlslangToSpvTraverser::createNoArgOperation(glslang::TOperator op, spv:
builder.createMemoryBarrier(spv::ScopeSubgroup, spv::MemorySemanticsWorkgroupMemoryMask |
spv::MemorySemanticsAcquireReleaseMask);
return spv::NoResult;
+
+ case glslang::EOpEmitVertex:
+ builder.createNoResultOp(spv::OpEmitVertex);
+ return 0;
+ case glslang::EOpEndPrimitive:
+ builder.createNoResultOp(spv::OpEndPrimitive);
+ return 0;
+
case glslang::EOpSubgroupElect: {
std::vector<spv::Id> operands;
return createSubgroupOperation(op, typeId, operands, glslang::EbtVoid);
}
-#ifdef AMD_EXTENSIONS
case glslang::EOpTime:
{
std::vector<spv::Id> args; // Dummy arguments
spv::Id id = builder.createBuiltinCall(typeId, getExtBuiltins(spv::E_SPV_AMD_gcn_shader), spv::TimeAMD, args);
return builder.setPrecision(id, precision);
}
-#endif
-#ifdef NV_EXTENSIONS
case glslang::EOpIgnoreIntersectionNV:
builder.createNoResultOp(spv::OpIgnoreIntersectionNV);
return 0;
case glslang::EOpTerminateRayNV:
builder.createNoResultOp(spv::OpTerminateRayNV);
return 0;
+
+ case glslang::EOpBeginInvocationInterlock:
+ builder.createNoResultOp(spv::OpBeginInvocationInterlockEXT);
+ return 0;
+ case glslang::EOpEndInvocationInterlock:
+ builder.createNoResultOp(spv::OpEndInvocationInterlockEXT);
+ return 0;
+
+ case glslang::EOpIsHelperInvocation:
+ {
+ std::vector<spv::Id> args; // Dummy arguments
+ builder.addExtension(spv::E_SPV_EXT_demote_to_helper_invocation);
+ builder.addCapability(spv::CapabilityDemoteToHelperInvocationEXT);
+ return builder.createOp(spv::OpIsHelperInvocationEXT, typeId, args);
+ }
+
+ case glslang::EOpReadClockSubgroupKHR: {
+ std::vector<spv::Id> args;
+ args.push_back(builder.makeUintConstant(spv::ScopeSubgroup));
+ builder.addExtension(spv::E_SPV_KHR_shader_clock);
+ builder.addCapability(spv::CapabilityShaderClockKHR);
+ return builder.createOp(spv::OpReadClockKHR, typeId, args);
+ }
+
+ case glslang::EOpReadClockDeviceKHR: {
+ std::vector<spv::Id> args;
+ args.push_back(builder.makeUintConstant(spv::ScopeDevice));
+ builder.addExtension(spv::E_SPV_KHR_shader_clock);
+ builder.addCapability(spv::CapabilityShaderClockKHR);
+ return builder.createOp(spv::OpReadClockKHR, typeId, args);
+ }
#endif
default:
- logger->missingFunctionality("unknown operation with no arguments");
- return 0;
+ break;
}
+
+ logger->missingFunctionality("unknown operation with no arguments");
+
+ return 0;
}
spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol)
@@ -7441,22 +7721,26 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
}
// it was not found, create it
- id = createSpvVariable(symbol);
+ spv::BuiltIn builtIn = TranslateBuiltInDecoration(symbol->getQualifier().builtIn, false);
+ auto forcedType = getForcedType(builtIn, symbol->getType());
+ id = createSpvVariable(symbol, forcedType.first);
symbolValues[symbol->getId()] = id;
+ if (forcedType.second != spv::NoType)
+ forceType[id] = forcedType.second;
if (symbol->getBasicType() != glslang::EbtBlock) {
builder.addDecoration(id, TranslatePrecisionDecoration(symbol->getType()));
builder.addDecoration(id, TranslateInterpolationDecoration(symbol->getType().getQualifier()));
builder.addDecoration(id, TranslateAuxiliaryStorageDecoration(symbol->getType().getQualifier()));
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
addMeshNVDecoration(id, /*member*/ -1, symbol->getType().getQualifier());
+ if (symbol->getQualifier().hasComponent())
+ builder.addDecoration(id, spv::DecorationComponent, symbol->getQualifier().layoutComponent);
+ if (symbol->getQualifier().hasIndex())
+ builder.addDecoration(id, spv::DecorationIndex, symbol->getQualifier().layoutIndex);
#endif
if (symbol->getType().getQualifier().hasSpecConstantId())
builder.addDecoration(id, spv::DecorationSpecId, symbol->getType().getQualifier().layoutSpecConstantId);
- if (symbol->getQualifier().hasIndex())
- builder.addDecoration(id, spv::DecorationIndex, symbol->getQualifier().layoutIndex);
- if (symbol->getQualifier().hasComponent())
- builder.addDecoration(id, spv::DecorationComponent, symbol->getQualifier().layoutComponent);
// atomic counters use this:
if (symbol->getQualifier().hasOffset())
builder.addDecoration(id, spv::DecorationOffset, symbol->getQualifier().layoutOffset);
@@ -7495,6 +7779,12 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
builder.addDecoration(id, spv::DecorationOffset, symbol->getQualifier().layoutXfbOffset);
}
+ // add built-in variable decoration
+ if (builtIn != spv::BuiltInMax) {
+ builder.addDecoration(id, spv::DecorationBuiltIn, (int)builtIn);
+ }
+
+#ifndef GLSLANG_WEB
if (symbol->getType().isImage()) {
std::vector<spv::Decoration> memory;
TranslateMemoryDecoration(symbol->getType().getQualifier(), memory, glslangIntermediate->usingVulkanMemoryModel());
@@ -7502,15 +7792,9 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
builder.addDecoration(id, memory[i]);
}
- // built-in variable decorations
- spv::BuiltIn builtIn = TranslateBuiltInDecoration(symbol->getQualifier().builtIn, false);
- if (builtIn != spv::BuiltInMax)
- builder.addDecoration(id, spv::DecorationBuiltIn, (int)builtIn);
-
// nonuniform
builder.addDecoration(id, TranslateNonUniformDecoration(symbol->getType().getQualifier()));
-#ifdef NV_EXTENSIONS
if (builtIn == spv::BuiltInSampleMask) {
spv::Decoration decoration;
// GL_NV_sample_mask_override_coverage extension
@@ -7520,6 +7804,7 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
decoration = (spv::Decoration)spv::DecorationMax;
builder.addDecoration(id, decoration);
if (decoration != spv::DecorationMax) {
+ builder.addCapability(spv::CapabilitySampleMaskOverrideCoverageNV);
builder.addExtension(spv::E_SPV_NV_sample_mask_override_coverage);
}
}
@@ -7548,7 +7833,6 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
builder.addCapability(spv::CapabilityFragmentBarycentricNV);
builder.addExtension(spv::E_SPV_NV_fragment_shader_barycentric);
}
-#endif
if (glslangIntermediate->getHlslFunctionality1() && symbol->getType().getQualifier().semanticName != nullptr) {
builder.addExtension("SPV_GOOGLE_hlsl_functionality1");
@@ -7556,14 +7840,15 @@ spv::Id TGlslangToSpvTraverser::getSymbolId(const glslang::TIntermSymbol* symbol
symbol->getType().getQualifier().semanticName);
}
- if (symbol->getBasicType() == glslang::EbtReference) {
+ if (symbol->isReference()) {
builder.addDecoration(id, symbol->getType().getQualifier().restrict ? spv::DecorationRestrictPointerEXT : spv::DecorationAliasedPointerEXT);
}
+#endif
return id;
}
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
// add per-primitive, per-view, per-task decorations to a struct member (member >= 0) or an object
void TGlslangToSpvTraverser::addMeshNVDecoration(spv::Id id, int member, const glslang::TQualifier& qualifier)
{
@@ -7699,6 +7984,19 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla
for (unsigned int i = 0; i < (unsigned int)glslangType.getVectorSize(); ++i) {
bool zero = nextConst >= consts.size();
switch (glslangType.getBasicType()) {
+ case glslang::EbtInt:
+ spvConsts.push_back(builder.makeIntConstant(zero ? 0 : consts[nextConst].getIConst()));
+ break;
+ case glslang::EbtUint:
+ spvConsts.push_back(builder.makeUintConstant(zero ? 0 : consts[nextConst].getUConst()));
+ break;
+ case glslang::EbtFloat:
+ spvConsts.push_back(builder.makeFloatConstant(zero ? 0.0F : (float)consts[nextConst].getDConst()));
+ break;
+ case glslang::EbtBool:
+ spvConsts.push_back(builder.makeBoolConstant(zero ? false : consts[nextConst].getBConst()));
+ break;
+#ifndef GLSLANG_WEB
case glslang::EbtInt8:
spvConsts.push_back(builder.makeInt8Constant(zero ? 0 : consts[nextConst].getI8Const()));
break;
@@ -7711,30 +8009,19 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla
case glslang::EbtUint16:
spvConsts.push_back(builder.makeUint16Constant(zero ? 0 : consts[nextConst].getU16Const()));
break;
- case glslang::EbtInt:
- spvConsts.push_back(builder.makeIntConstant(zero ? 0 : consts[nextConst].getIConst()));
- break;
- case glslang::EbtUint:
- spvConsts.push_back(builder.makeUintConstant(zero ? 0 : consts[nextConst].getUConst()));
- break;
case glslang::EbtInt64:
spvConsts.push_back(builder.makeInt64Constant(zero ? 0 : consts[nextConst].getI64Const()));
break;
case glslang::EbtUint64:
spvConsts.push_back(builder.makeUint64Constant(zero ? 0 : consts[nextConst].getU64Const()));
break;
- case glslang::EbtFloat:
- spvConsts.push_back(builder.makeFloatConstant(zero ? 0.0F : (float)consts[nextConst].getDConst()));
- break;
case glslang::EbtDouble:
spvConsts.push_back(builder.makeDoubleConstant(zero ? 0.0 : consts[nextConst].getDConst()));
break;
case glslang::EbtFloat16:
spvConsts.push_back(builder.makeFloat16Constant(zero ? 0.0F : (float)consts[nextConst].getDConst()));
break;
- case glslang::EbtBool:
- spvConsts.push_back(builder.makeBoolConstant(zero ? false : consts[nextConst].getBConst()));
- break;
+#endif
default:
assert(0);
break;
@@ -7746,6 +8033,19 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla
bool zero = nextConst >= consts.size();
spv::Id scalar = 0;
switch (glslangType.getBasicType()) {
+ case glslang::EbtInt:
+ scalar = builder.makeIntConstant(zero ? 0 : consts[nextConst].getIConst(), specConstant);
+ break;
+ case glslang::EbtUint:
+ scalar = builder.makeUintConstant(zero ? 0 : consts[nextConst].getUConst(), specConstant);
+ break;
+ case glslang::EbtFloat:
+ scalar = builder.makeFloatConstant(zero ? 0.0F : (float)consts[nextConst].getDConst(), specConstant);
+ break;
+ case glslang::EbtBool:
+ scalar = builder.makeBoolConstant(zero ? false : consts[nextConst].getBConst(), specConstant);
+ break;
+#ifndef GLSLANG_WEB
case glslang::EbtInt8:
scalar = builder.makeInt8Constant(zero ? 0 : consts[nextConst].getI8Const(), specConstant);
break;
@@ -7758,34 +8058,23 @@ spv::Id TGlslangToSpvTraverser::createSpvConstantFromConstUnionArray(const glsla
case glslang::EbtUint16:
scalar = builder.makeUint16Constant(zero ? 0 : consts[nextConst].getU16Const(), specConstant);
break;
- case glslang::EbtInt:
- scalar = builder.makeIntConstant(zero ? 0 : consts[nextConst].getIConst(), specConstant);
- break;
- case glslang::EbtUint:
- scalar = builder.makeUintConstant(zero ? 0 : consts[nextConst].getUConst(), specConstant);
- break;
case glslang::EbtInt64:
scalar = builder.makeInt64Constant(zero ? 0 : consts[nextConst].getI64Const(), specConstant);
break;
case glslang::EbtUint64:
scalar = builder.makeUint64Constant(zero ? 0 : consts[nextConst].getU64Const(), specConstant);
break;
- case glslang::EbtFloat:
- scalar = builder.makeFloatConstant(zero ? 0.0F : (float)consts[nextConst].getDConst(), specConstant);
- break;
case glslang::EbtDouble:
scalar = builder.makeDoubleConstant(zero ? 0.0 : consts[nextConst].getDConst(), specConstant);
break;
case glslang::EbtFloat16:
scalar = builder.makeFloat16Constant(zero ? 0.0F : (float)consts[nextConst].getDConst(), specConstant);
break;
- case glslang::EbtBool:
- scalar = builder.makeBoolConstant(zero ? false : consts[nextConst].getBConst(), specConstant);
- break;
case glslang::EbtReference:
scalar = builder.makeUint64Constant(zero ? 0 : consts[nextConst].getU64Const(), specConstant);
scalar = builder.createUnaryOp(spv::OpBitcast, typeId, scalar);
break;
+#endif
default:
assert(0);
break;
@@ -7929,7 +8218,7 @@ spv::Id TGlslangToSpvTraverser::createShortCircuit(glslang::TOperator op, glslan
return builder.createOp(spv::OpPhi, boolTypeId, phiOperands);
}
-#ifdef AMD_EXTENSIONS
+#ifndef GLSLANG_WEB
// Return type Id of the imported set of extended instructions corresponds to the name.
// Import this set if it has not been imported yet.
spv::Id TGlslangToSpvTraverser::getExtBuiltins(const char* name)
@@ -7969,7 +8258,8 @@ int GetSpirvGeneratorVersion()
// return 5; // make OpArrayLength result type be an int with signedness of 0
// return 6; // revert version 5 change, which makes a different (new) kind of incorrect code,
// versions 4 and 6 each generate OpArrayLength as it has long been done
- return 7; // GLSL volatile keyword maps to both SPIR-V decorations Volatile and Coherent
+ // return 7; // GLSL volatile keyword maps to both SPIR-V decorations Volatile and Coherent
+ return 8; // switch to new dead block eliminator; use OpUnreachable
}
// Write SPIR-V out to a binary file
@@ -7989,6 +8279,7 @@ void OutputSpvBin(const std::vector<unsigned int>& spirv, const char* baseName)
// Write SPIR-V out to a text file with 32-bit hexadecimal words
void OutputSpvHex(const std::vector<unsigned int>& spirv, const char* baseName, const char* varName)
{
+#ifndef GLSLANG_WEB
std::ofstream out;
out.open(baseName, std::ios::binary | std::ios::out);
if (out.fail())
@@ -8016,6 +8307,7 @@ void OutputSpvHex(const std::vector<unsigned int>& spirv, const char* baseName,
out << "};";
}
out.close();
+#endif
}
//
@@ -8049,11 +8341,14 @@ void GlslangToSpv(const TIntermediate& intermediate, std::vector<unsigned int>&
#if ENABLE_OPT
// If from HLSL, run spirv-opt to "legalize" the SPIR-V for Vulkan
// eg. forward and remove memory writes of opaque types.
- if ((intermediate.getSource() == EShSourceHlsl || options->optimizeSize) && !options->disableOptimizer)
+ bool prelegalization = intermediate.getSource() == EShSourceHlsl;
+ if ((intermediate.getSource() == EShSourceHlsl || options->optimizeSize) && !options->disableOptimizer) {
SpirvToolsLegalize(intermediate, spirv, logger, options);
+ prelegalization = false;
+ }
if (options->validate)
- SpirvToolsValidate(intermediate, spirv, logger);
+ SpirvToolsValidate(intermediate, spirv, logger, prelegalization);
if (options->disassemble)
SpirvToolsDisassemble(std::cout, spirv);
diff --git a/thirdparty/glslang/SPIRV/GlslangToSpv.h b/thirdparty/glslang/SPIRV/GlslangToSpv.h
index 86e1c23bf6..3907be43b7 100644..100755
--- a/thirdparty/glslang/SPIRV/GlslangToSpv.h
+++ b/thirdparty/glslang/SPIRV/GlslangToSpv.h
@@ -40,7 +40,7 @@
#endif
#include "SpvTools.h"
-#include "../glslang/Include/intermediate.h"
+#include "glslang/Include/intermediate.h"
#include <string>
#include <vector>
diff --git a/thirdparty/glslang/SPIRV/InReadableOrder.cpp b/thirdparty/glslang/SPIRV/InReadableOrder.cpp
index 52b29613a4..9d9410be93 100644
--- a/thirdparty/glslang/SPIRV/InReadableOrder.cpp
+++ b/thirdparty/glslang/SPIRV/InReadableOrder.cpp
@@ -61,17 +61,22 @@ namespace {
// Use by calling visit() on the root block.
class ReadableOrderTraverser {
public:
- explicit ReadableOrderTraverser(std::function<void(Block*)> callback) : callback_(callback) {}
+ ReadableOrderTraverser(std::function<void(Block*, spv::ReachReason, Block*)> callback)
+ : callback_(callback) {}
// Visits the block if it hasn't been visited already and isn't currently
- // being delayed. Invokes callback(block), then descends into its
+ // being delayed. Invokes callback(block, why, header), then descends into its
// successors. Delays merge-block and continue-block processing until all
- // the branches have been completed.
- void visit(Block* block)
+ // the branches have been completed. If |block| is an unreachable merge block or
+ // an unreachable continue target, then |header| is the corresponding header block.
+ void visit(Block* block, spv::ReachReason why, Block* header)
{
assert(block);
+ if (why == spv::ReachViaControlFlow) {
+ reachableViaControlFlow_.insert(block);
+ }
if (visited_.count(block) || delayed_.count(block))
return;
- callback_(block);
+ callback_(block, why, header);
visited_.insert(block);
Block* mergeBlock = nullptr;
Block* continueBlock = nullptr;
@@ -87,27 +92,40 @@ public:
delayed_.insert(continueBlock);
}
}
- const auto successors = block->getSuccessors();
- for (auto it = successors.cbegin(); it != successors.cend(); ++it)
- visit(*it);
+ if (why == spv::ReachViaControlFlow) {
+ const auto& successors = block->getSuccessors();
+ for (auto it = successors.cbegin(); it != successors.cend(); ++it)
+ visit(*it, why, nullptr);
+ }
if (continueBlock) {
+ const spv::ReachReason continueWhy =
+ (reachableViaControlFlow_.count(continueBlock) > 0)
+ ? spv::ReachViaControlFlow
+ : spv::ReachDeadContinue;
delayed_.erase(continueBlock);
- visit(continueBlock);
+ visit(continueBlock, continueWhy, block);
}
if (mergeBlock) {
+ const spv::ReachReason mergeWhy =
+ (reachableViaControlFlow_.count(mergeBlock) > 0)
+ ? spv::ReachViaControlFlow
+ : spv::ReachDeadMerge;
delayed_.erase(mergeBlock);
- visit(mergeBlock);
+ visit(mergeBlock, mergeWhy, block);
}
}
private:
- std::function<void(Block*)> callback_;
+ std::function<void(Block*, spv::ReachReason, Block*)> callback_;
// Whether a block has already been visited or is being delayed.
std::unordered_set<Block *> visited_, delayed_;
+
+ // The set of blocks that actually are reached via control flow.
+ std::unordered_set<Block *> reachableViaControlFlow_;
};
}
-void spv::inReadableOrder(Block* root, std::function<void(Block*)> callback)
+void spv::inReadableOrder(Block* root, std::function<void(Block*, spv::ReachReason, Block*)> callback)
{
- ReadableOrderTraverser(callback).visit(root);
+ ReadableOrderTraverser(callback).visit(root, spv::ReachViaControlFlow, nullptr);
}
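// Callers now receive the reason each block was reached; a minimal sketch of
// the new callback shape (mirroring its use in SpvPostProcess.cpp):
//
//     spv::inReadableOrder(entry,
//         [](spv::Block* b, spv::ReachReason why, spv::Block* header) {
//             // why == ReachViaControlFlow for ordinary blocks; for
//             // ReachDeadMerge/ReachDeadContinue, header is the construct
//             // whose declaration forces b to exist.
//         });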
diff --git a/thirdparty/glslang/SPIRV/Logger.cpp b/thirdparty/glslang/SPIRV/Logger.cpp
index 48bd4e3ade..7ea0c6342b 100644
--- a/thirdparty/glslang/SPIRV/Logger.cpp
+++ b/thirdparty/glslang/SPIRV/Logger.cpp
@@ -32,6 +32,8 @@
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
+#ifndef GLSLANG_WEB
+
#include "Logger.h"
#include <algorithm>
@@ -66,3 +68,5 @@ std::string SpvBuildLogger::getAllMessages() const {
}
} // end spv namespace
+
+#endif
\ No newline at end of file
diff --git a/thirdparty/glslang/SPIRV/Logger.h b/thirdparty/glslang/SPIRV/Logger.h
index 2e4ddaf517..411367c030 100644
--- a/thirdparty/glslang/SPIRV/Logger.h
+++ b/thirdparty/glslang/SPIRV/Logger.h
@@ -46,6 +46,14 @@ class SpvBuildLogger {
public:
SpvBuildLogger() {}
+#ifdef GLSLANG_WEB
+ void tbdFunctionality(const std::string& f) { }
+ void missingFunctionality(const std::string& f) { }
+ void warning(const std::string& w) { }
+ void error(const std::string& e) { errors.push_back(e); }
+ std::string getAllMessages() { return ""; }
+#else
+
// Registers a TBD functionality.
void tbdFunctionality(const std::string& f);
// Registers a missing functionality.
@@ -59,6 +67,7 @@ public:
// Returns all messages accumulated in the order of:
// TBD functionalities, missing functionalities, warnings, errors.
std::string getAllMessages() const;
+#endif
private:
SpvBuildLogger(const SpvBuildLogger&);
diff --git a/thirdparty/glslang/SPIRV/SPVRemapper.h b/thirdparty/glslang/SPIRV/SPVRemapper.h
index fa61bb94d8..d6b9c346dd 100644
--- a/thirdparty/glslang/SPIRV/SPVRemapper.h
+++ b/thirdparty/glslang/SPIRV/SPVRemapper.h
@@ -195,7 +195,7 @@ private:
// Header access & set methods
spirword_t magic() const { return spv[0]; } // return magic number
spirword_t bound() const { return spv[3]; } // return Id bound from header
- spirword_t bound(spirword_t b) { return spv[3] = b; };
+ spirword_t bound(spirword_t b) { return spv[3] = b; }
spirword_t genmagic() const { return spv[2]; } // generator magic
spirword_t genmagic(spirword_t m) { return spv[2] = m; }
spirword_t schemaNum() const { return spv[4]; } // schema number from header
diff --git a/thirdparty/glslang/SPIRV/SpvBuilder.cpp b/thirdparty/glslang/SPIRV/SpvBuilder.cpp
index 4ef7e5fe7f..bd208952e0 100644
--- a/thirdparty/glslang/SPIRV/SpvBuilder.cpp
+++ b/thirdparty/glslang/SPIRV/SpvBuilder.cpp
@@ -46,7 +46,9 @@
#include "SpvBuilder.h"
+#ifndef GLSLANG_WEB
#include "hex_float.h"
+#endif
#ifndef _WIN32
#include <cstdio>
@@ -230,6 +232,11 @@ Id Builder::makePointerFromForwardPointer(StorageClass storageClass, Id forwardP
Id Builder::makeIntegerType(int width, bool hasSign)
{
+#ifdef GLSLANG_WEB
+ assert(width == 32);
+ width = 32;
+#endif
+
// try to find it
Instruction* type;
for (int t = 0; t < (int)groupedTypes[OpTypeInt].size(); ++t) {
@@ -265,6 +272,11 @@ Id Builder::makeIntegerType(int width, bool hasSign)
Id Builder::makeFloatType(int width)
{
+#ifdef GLSLANG_WEB
+ assert(width == 32);
+ width = 32;
+#endif
+
// try to find it
Instruction* type;
for (int t = 0; t < (int)groupedTypes[OpTypeFloat].size(); ++t) {
@@ -516,6 +528,7 @@ Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, boo
constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
module.mapInstruction(type);
+#ifndef GLSLANG_WEB
// deal with capabilities
switch (dim) {
case DimBuffer:
@@ -561,6 +574,7 @@ Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, boo
addCapability(CapabilityImageMSArray);
}
}
+#endif
return type->getResultId();
}
@@ -586,7 +600,7 @@ Id Builder::makeSampledImageType(Id imageType)
return type->getResultId();
}
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
Id Builder::makeAccelerationStructureNVType()
{
Instruction *type;
@@ -602,6 +616,7 @@ Id Builder::makeAccelerationStructureNVType()
return type->getResultId();
}
#endif
+
Id Builder::getDerefTypeId(Id resultId) const
{
Id typeId = getTypeId(resultId);
@@ -939,6 +954,10 @@ Id Builder::makeFloatConstant(float f, bool specConstant)
Id Builder::makeDoubleConstant(double d, bool specConstant)
{
+#ifdef GLSLANG_WEB
+ assert(0);
+ return NoResult;
+#else
Op opcode = specConstant ? OpSpecConstant : OpConstant;
Id typeId = makeFloatType(64);
union { double db; unsigned long long ull; } u;
@@ -963,10 +982,15 @@ Id Builder::makeDoubleConstant(double d, bool specConstant)
module.mapInstruction(c);
return c->getResultId();
+#endif
}
Id Builder::makeFloat16Constant(float f16, bool specConstant)
{
+#ifdef GLSLANG_WEB
+ assert(0);
+ return NoResult;
+#else
Op opcode = specConstant ? OpSpecConstant : OpConstant;
Id typeId = makeFloatType(16);
@@ -991,25 +1015,33 @@ Id Builder::makeFloat16Constant(float f16, bool specConstant)
module.mapInstruction(c);
return c->getResultId();
+#endif
}
Id Builder::makeFpConstant(Id type, double d, bool specConstant)
{
- assert(isFloatType(type));
+#ifdef GLSLANG_WEB
+ const int width = 32;
+ assert(width == getScalarTypeWidth(type));
+#else
+ const int width = getScalarTypeWidth(type);
+#endif
- switch (getScalarTypeWidth(type)) {
- case 16:
- return makeFloat16Constant((float)d, specConstant);
- case 32:
- return makeFloatConstant((float)d, specConstant);
- case 64:
- return makeDoubleConstant(d, specConstant);
- default:
- break;
- }
+ assert(isFloatType(type));
- assert(false);
- return NoResult;
+ switch (width) {
+ case 16:
+ return makeFloat16Constant((float)d, specConstant);
+ case 32:
+ return makeFloatConstant((float)d, specConstant);
+ case 64:
+ return makeDoubleConstant(d, specConstant);
+ default:
+ break;
+ }
+
+ assert(false);
+ return NoResult;
}
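// Width-driven usage is unchanged; e.g. (sketch, non-web build):
//
//     Id h = makeFpConstant(makeFloatType(16), 1.0, false); // -> makeFloat16Constant
//     Id f = makeFpConstant(makeFloatType(32), 1.0, false); // -> makeFloatConstant
//
// Under GLSLANG_WEB only the 32-bit path is live; the 16/64-bit constant
// helpers above are compiled as assert(0) stubs.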
Id Builder::findCompositeConstant(Op typeClass, Id typeId, const std::vector<Id>& comps)
@@ -1825,7 +1857,7 @@ Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse,
if (parameters.component != NoResult)
texArgs[numArgs++] = parameters.component;
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
if (parameters.granularity != NoResult)
texArgs[numArgs++] = parameters.granularity;
if (parameters.coarse != NoResult)
@@ -1872,6 +1904,7 @@ Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse,
mask = (ImageOperandsMask)(mask | ImageOperandsConstOffsetsMask);
texArgs[numArgs++] = parameters.offsets;
}
+#ifndef GLSLANG_WEB
if (parameters.sample) {
mask = (ImageOperandsMask)(mask | ImageOperandsSampleMask);
texArgs[numArgs++] = parameters.sample;
@@ -1889,6 +1922,7 @@ Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse,
if (parameters.volatil) {
mask = mask | ImageOperandsVolatileTexelKHRMask;
}
+#endif
mask = mask | signExtensionMask;
if (mask == ImageOperandsMaskNone)
--numArgs; // undo speculative reservation for the mask argument
@@ -1904,10 +1938,9 @@ Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse,
opCode = OpImageSparseFetch;
else
opCode = OpImageFetch;
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
} else if (parameters.granularity && parameters.coarse) {
opCode = OpImageSampleFootprintNV;
-#endif
} else if (gather) {
if (parameters.Dref)
if (sparse)
@@ -1919,6 +1952,7 @@ Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse,
opCode = OpImageSparseGather;
else
opCode = OpImageGather;
+#endif
} else if (explicitLod) {
if (parameters.Dref) {
if (proj)
@@ -2067,11 +2101,7 @@ Id Builder::createTextureQueryCall(Op opCode, const TextureParameters& parameter
break;
}
case OpImageQueryLod:
-#ifdef AMD_EXTENSIONS
resultType = makeVectorType(getScalarTypeId(getTypeId(parameters.coords)), 2);
-#else
- resultType = makeVectorType(makeFloatType(32), 2);
-#endif
break;
case OpImageQueryLevels:
case OpImageQuerySamples:
@@ -2089,6 +2119,7 @@ Id Builder::createTextureQueryCall(Op opCode, const TextureParameters& parameter
if (parameters.lod)
query->addIdOperand(parameters.lod);
buildPoint->addInstruction(std::unique_ptr<Instruction>(query));
+ addCapability(CapabilityImageQuery);
return query->getResultId();
}
@@ -2282,7 +2313,12 @@ Id Builder::createMatrixConstructor(Decoration precision, const std::vector<Id>&
int numRows = getTypeNumRows(resultTypeId);
Instruction* instr = module.getInstruction(componentTypeId);
- unsigned bitCount = instr->getImmediateOperand(0);
+#ifdef GLSLANG_WEB
+ const unsigned bitCount = 32;
+ assert(bitCount == instr->getImmediateOperand(0));
+#else
+ const unsigned bitCount = instr->getImmediateOperand(0);
+#endif
// Optimize matrix constructed from a bigger matrix
if (isMatrix(sources[0]) && getNumColumns(sources[0]) >= numCols && getNumRows(sources[0]) >= numRows) {
diff --git a/thirdparty/glslang/SPIRV/SpvBuilder.h b/thirdparty/glslang/SPIRV/SpvBuilder.h
index faed8e8230..31fee975fc 100644
--- a/thirdparty/glslang/SPIRV/SpvBuilder.h
+++ b/thirdparty/glslang/SPIRV/SpvBuilder.h
@@ -67,6 +67,7 @@ typedef enum {
Spv_1_2 = (1 << 16) | (2 << 8),
Spv_1_3 = (1 << 16) | (3 << 8),
Spv_1_4 = (1 << 16) | (4 << 8),
+ Spv_1_5 = (1 << 16) | (5 << 8),
} SpvVersion;
class Builder {
@@ -105,6 +106,20 @@ public:
void addModuleProcessed(const std::string& p) { moduleProcesses.push_back(p.c_str()); }
void setEmitOpLines() { emitOpLines = true; }
void addExtension(const char* ext) { extensions.insert(ext); }
+ void removeExtension(const char* ext)
+ {
+ extensions.erase(ext);
+ }
+ void addIncorporatedExtension(const char* ext, SpvVersion incorporatedVersion)
+ {
+ if (getSpvVersion() < static_cast<unsigned>(incorporatedVersion))
+ addExtension(ext);
+ }
+ void promoteIncorporatedExtension(const char* baseExt, const char* promoExt, SpvVersion incorporatedVersion)
+ {
+ removeExtension(baseExt);
+ addIncorporatedExtension(promoExt, incorporatedVersion);
+ }
void addInclude(const std::string& name, const std::string& text)
{
spv::Id incId = getStringId(name);
@@ -201,7 +216,11 @@ public:
bool isMatrixType(Id typeId) const { return getTypeClass(typeId) == OpTypeMatrix; }
bool isStructType(Id typeId) const { return getTypeClass(typeId) == OpTypeStruct; }
bool isArrayType(Id typeId) const { return getTypeClass(typeId) == OpTypeArray; }
+#ifdef GLSLANG_WEB
+ bool isCooperativeMatrixType(Id typeId)const { return false; }
+#else
bool isCooperativeMatrixType(Id typeId)const { return getTypeClass(typeId) == OpTypeCooperativeMatrixNV; }
+#endif
bool isAggregateType(Id typeId) const { return isArrayType(typeId) || isStructType(typeId) || isCooperativeMatrixType(typeId); }
bool isImageType(Id typeId) const { return getTypeClass(typeId) == OpTypeImage; }
bool isSamplerType(Id typeId) const { return getTypeClass(typeId) == OpTypeSampler; }
@@ -557,6 +576,14 @@ public:
// Accumulate whether anything in the chain of structures has coherent decorations.
struct CoherentFlags {
+ CoherentFlags() { clear(); }
+#ifdef GLSLANG_WEB
+ void clear() { }
+ bool isVolatile() const { return false; }
+ CoherentFlags operator |=(const CoherentFlags &other) { return *this; }
+#else
+ bool isVolatile() const { return volatil; }
+
unsigned coherent : 1;
unsigned devicecoherent : 1;
unsigned queuefamilycoherent : 1;
@@ -577,7 +604,6 @@ public:
isImage = 0;
}
- CoherentFlags() { clear(); }
CoherentFlags operator |=(const CoherentFlags &other) {
coherent |= other.coherent;
devicecoherent |= other.devicecoherent;
@@ -589,6 +615,7 @@ public:
isImage |= other.isImage;
return *this;
}
+#endif
};
CoherentFlags coherentFlags;
};
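// In the GLSLANG_WEB build CoherentFlags is a stub, so e.g.
//
//     CoherentFlags cf;
//     cf |= CoherentFlags();     // no-op
//     bool v = cf.isVolatile();  // always false
//
// which lets callers such as createAtomicOperation() keep one code path.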
@@ -656,16 +683,21 @@ public:
// based on the type of the base and the chain of dereferences.
Id accessChainGetInferredType();
- // Add capabilities, extensions, remove unneeded decorations, etc.,
+ // Add capabilities, extensions, remove unneeded decorations, etc.,
// based on the resulting SPIR-V.
void postProcess();
+ // Prune unreachable blocks in the CFG and remove unneeded decorations.
+ void postProcessCFG();
+
+#ifndef GLSLANG_WEB
+ // Add capabilities, extensions based on instructions in the module.
+ void postProcessFeatures();
// Hook to visit each instruction in a block in a function
void postProcess(Instruction&);
- // Hook to visit each instruction in a reachable block in a function.
- void postProcessReachable(const Instruction&);
// Hook to visit each non-32-bit sized float/int operation in a block.
void postProcessType(const Instruction&, spv::Id typeId);
+#endif
void dump(std::vector<unsigned int>&) const;
diff --git a/thirdparty/glslang/SPIRV/SpvPostProcess.cpp b/thirdparty/glslang/SPIRV/SpvPostProcess.cpp
index 6e1f7cf61f..d40174d172 100644
--- a/thirdparty/glslang/SPIRV/SpvPostProcess.cpp
+++ b/thirdparty/glslang/SPIRV/SpvPostProcess.cpp
@@ -39,6 +39,7 @@
#include <cassert>
#include <cstdlib>
+#include <unordered_map>
#include <unordered_set>
#include <algorithm>
@@ -51,16 +52,13 @@ namespace spv {
#include "GLSL.std.450.h"
#include "GLSL.ext.KHR.h"
#include "GLSL.ext.EXT.h"
-#ifdef AMD_EXTENSIONS
#include "GLSL.ext.AMD.h"
-#endif
-#ifdef NV_EXTENSIONS
#include "GLSL.ext.NV.h"
-#endif
}
namespace spv {
+#ifndef GLSLANG_WEB
// Hook to visit each operand type and result type of an instruction.
// Will be called multiple times for one instruction, once for each typed
// operand and the result.
@@ -160,7 +158,6 @@ void Builder::postProcessType(const Instruction& inst, Id typeId)
}
break;
case OpExtInst:
-#if AMD_EXTENSIONS
switch (inst.getImmediateOperand(1)) {
case GLSLstd450Frexp:
case GLSLstd450FrexpStruct:
@@ -176,7 +173,6 @@ void Builder::postProcessType(const Instruction& inst, Id typeId)
default:
break;
}
-#endif
break;
default:
if (basicTypeOp == OpTypeFloat && width == 16)
@@ -222,12 +218,10 @@ void Builder::postProcess(Instruction& inst)
addCapability(CapabilityImageQuery);
break;
-#ifdef NV_EXTENSIONS
case OpGroupNonUniformPartitionNV:
addExtension(E_SPV_NV_shader_subgroup_partitioned);
addCapability(CapabilityGroupNonUniformPartitionedNV);
break;
-#endif
case OpLoad:
case OpStore:
@@ -326,17 +320,16 @@ void Builder::postProcess(Instruction& inst)
}
}
}
-
-// Called for each instruction in a reachable block.
-void Builder::postProcessReachable(const Instruction&)
-{
- // did have code here, but questionable to do so without deleting the instructions
-}
+#endif
// comment in header
-void Builder::postProcess()
+void Builder::postProcessCFG()
{
+    // reachableBlocks is the set of blocks reached via control flow, plus any
+    // unreachable continue target or unreachable merge block.
std::unordered_set<const Block*> reachableBlocks;
+ std::unordered_map<Block*, Block*> headerForUnreachableContinue;
+ std::unordered_set<Block*> unreachableMerges;
std::unordered_set<Id> unreachableDefinitions;
// Collect IDs defined in unreachable blocks. For each function, label the
// reachable blocks first. Then for each unreachable block, collect the
@@ -344,16 +337,41 @@ void Builder::postProcess()
for (auto fi = module.getFunctions().cbegin(); fi != module.getFunctions().cend(); fi++) {
Function* f = *fi;
Block* entry = f->getEntryBlock();
- inReadableOrder(entry, [&reachableBlocks](const Block* b) { reachableBlocks.insert(b); });
+ inReadableOrder(entry,
+ [&reachableBlocks, &unreachableMerges, &headerForUnreachableContinue]
+ (Block* b, ReachReason why, Block* header) {
+ reachableBlocks.insert(b);
+ if (why == ReachDeadContinue) headerForUnreachableContinue[b] = header;
+ if (why == ReachDeadMerge) unreachableMerges.insert(b);
+ });
for (auto bi = f->getBlocks().cbegin(); bi != f->getBlocks().cend(); bi++) {
Block* b = *bi;
- if (reachableBlocks.count(b) == 0) {
- for (auto ii = b->getInstructions().cbegin(); ii != b->getInstructions().cend(); ii++)
+ if (unreachableMerges.count(b) != 0 || headerForUnreachableContinue.count(b) != 0) {
+ auto ii = b->getInstructions().cbegin();
+ ++ii; // Keep potential decorations on the label.
+ for (; ii != b->getInstructions().cend(); ++ii)
+ unreachableDefinitions.insert(ii->get()->getResultId());
+ } else if (reachableBlocks.count(b) == 0) {
+ // The normal case for unreachable code. All definitions are considered dead.
+ for (auto ii = b->getInstructions().cbegin(); ii != b->getInstructions().cend(); ++ii)
unreachableDefinitions.insert(ii->get()->getResultId());
}
}
}
+ // Modify unreachable merge blocks and unreachable continue targets.
+ // Delete their contents.
+ for (auto mergeIter = unreachableMerges.begin(); mergeIter != unreachableMerges.end(); ++mergeIter) {
+ (*mergeIter)->rewriteAsCanonicalUnreachableMerge();
+ }
+ for (auto continueIter = headerForUnreachableContinue.begin();
+ continueIter != headerForUnreachableContinue.end();
+ ++continueIter) {
+ Block* continue_target = continueIter->first;
+ Block* header = continueIter->second;
+ continue_target->rewriteAsCanonicalUnreachableContinue(header);
+ }
+
// Remove unneeded decorations, for unreachable instructions
decorations.erase(std::remove_if(decorations.begin(), decorations.end(),
[&unreachableDefinitions](std::unique_ptr<Instruction>& I) -> bool {
@@ -361,7 +379,11 @@ void Builder::postProcess()
return unreachableDefinitions.count(decoration_id) != 0;
}),
decorations.end());
+}
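// Net effect (sketch, based on the spvIR.h helpers): an unreachable merge
// block is rewritten to the canonical
//
//     %merge = OpLabel
//              OpUnreachable
//
// and an unreachable continue target keeps only an OpBranch back to its loop
// header, so the structured CFG stays valid while all dead definitions go away.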
+#ifndef GLSLANG_WEB
+// comment in header
+void Builder::postProcessFeatures() {
// Add per-instruction capabilities, extensions, etc.,
// Look for any 8/16 bit type in physical storage buffer class, and set the
@@ -371,24 +393,17 @@ void Builder::postProcess()
Instruction* type = groupedTypes[OpTypePointer][t];
if (type->getImmediateOperand(0) == (unsigned)StorageClassPhysicalStorageBufferEXT) {
if (containsType(type->getIdOperand(1), OpTypeInt, 8)) {
- addExtension(spv::E_SPV_KHR_8bit_storage);
+ addIncorporatedExtension(spv::E_SPV_KHR_8bit_storage, spv::Spv_1_5);
addCapability(spv::CapabilityStorageBuffer8BitAccess);
}
if (containsType(type->getIdOperand(1), OpTypeInt, 16) ||
containsType(type->getIdOperand(1), OpTypeFloat, 16)) {
- addExtension(spv::E_SPV_KHR_16bit_storage);
+ addIncorporatedExtension(spv::E_SPV_KHR_16bit_storage, spv::Spv_1_3);
addCapability(spv::CapabilityStorageBuffer16BitAccess);
}
}
}
- // process all reachable instructions...
- for (auto bi = reachableBlocks.cbegin(); bi != reachableBlocks.cend(); ++bi) {
- const Block* block = *bi;
- const auto function = [this](const std::unique_ptr<Instruction>& inst) { postProcessReachable(*inst.get()); };
- std::for_each(block->getInstructions().begin(), block->getInstructions().end(), function);
- }
-
// process all block-contained instructions
for (auto fi = module.getFunctions().cbegin(); fi != module.getFunctions().cend(); fi++) {
Function* f = *fi;
@@ -422,5 +437,14 @@ void Builder::postProcess()
}
}
}
+#endif
+
+// comment in header
+void Builder::postProcess() {
+ postProcessCFG();
+#ifndef GLSLANG_WEB
+ postProcessFeatures();
+#endif
+}
}; // end spv namespace
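
The decoration cleanup above is the standard erase-remove idiom: collect the result ids defined in dead blocks, then drop every decoration aimed at one of them. A minimal standalone sketch of that pattern, using a toy Decoration struct and a hypothetical pruneDecorations helper in place of glslang's Instruction/Id types:

    #include <algorithm>
    #include <cstdio>
    #include <memory>
    #include <unordered_set>
    #include <vector>

    // Stand-in for a decoration instruction: it targets some result id.
    struct Decoration {
        unsigned targetId;
    };

    // Drop every decoration whose target was defined in unreachable code.
    void pruneDecorations(std::vector<std::unique_ptr<Decoration>>& decorations,
                          const std::unordered_set<unsigned>& unreachableDefinitions)
    {
        decorations.erase(std::remove_if(decorations.begin(), decorations.end(),
            [&unreachableDefinitions](const std::unique_ptr<Decoration>& d) {
                return unreachableDefinitions.count(d->targetId) != 0;
            }),
            decorations.end());
    }

    int main()
    {
        std::vector<std::unique_ptr<Decoration>> decorations;
        for (unsigned id : {1u, 2u, 3u})
            decorations.push_back(std::unique_ptr<Decoration>(new Decoration{id}));
        pruneDecorations(decorations, {2u});  // id 2 was defined in a dead block
        std::printf("%zu decorations left\n", decorations.size());  // prints 2
    }
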
diff --git a/thirdparty/glslang/SPIRV/SpvTools.cpp b/thirdparty/glslang/SPIRV/SpvTools.cpp
index db26d59089..97bd4e7427 100644
--- a/thirdparty/glslang/SPIRV/SpvTools.cpp
+++ b/thirdparty/glslang/SPIRV/SpvTools.cpp
@@ -67,6 +67,8 @@ spv_target_env MapToSpirvToolsEnv(const SpvVersion& spvVersion, spv::SpvBuildLog
logger->missingFunctionality("Target version for SPIRV-Tools validator");
return spv_target_env::SPV_ENV_VULKAN_1_1;
}
+ case glslang::EShTargetVulkan_1_2:
+ return spv_target_env::SPV_ENV_VULKAN_1_2;
default:
break;
}
@@ -103,7 +105,7 @@ void SpirvToolsDisassemble(std::ostream& out, const std::vector<unsigned int>& s
// Apply the SPIRV-Tools validator to generated SPIR-V.
void SpirvToolsValidate(const glslang::TIntermediate& intermediate, std::vector<unsigned int>& spirv,
- spv::SpvBuildLogger* logger)
+ spv::SpvBuildLogger* logger, bool prelegalization)
{
// validate
spv_context context = spvContextCreate(MapToSpirvToolsEnv(intermediate.getSpv(), logger));
@@ -111,6 +113,7 @@ void SpirvToolsValidate(const glslang::TIntermediate& intermediate, std::vector<
spv_diagnostic diagnostic = nullptr;
spv_validator_options options = spvValidatorOptionsCreate();
spvValidatorOptionsSetRelaxBlockLayout(options, intermediate.usingHlslOffsets());
+ spvValidatorOptionsSetBeforeHlslLegalization(options, prelegalization);
spvValidateWithOptions(context, options, &binary, &diagnostic);
// report
@@ -172,6 +175,7 @@ void SpirvToolsLegalize(const glslang::TIntermediate&, std::vector<unsigned int>
if (options->generateDebugInfo) {
optimizer.RegisterPass(spvtools::CreatePropagateLineInfoPass());
}
+ optimizer.RegisterPass(spvtools::CreateWrapOpKillPass());
optimizer.RegisterPass(spvtools::CreateDeadBranchElimPass());
optimizer.RegisterPass(spvtools::CreateMergeReturnPass());
optimizer.RegisterPass(spvtools::CreateInlineExhaustivePass());
@@ -195,8 +199,6 @@ void SpirvToolsLegalize(const glslang::TIntermediate&, std::vector<unsigned int>
optimizer.RegisterPass(spvtools::CreateDeadInsertElimPass());
if (options->optimizeSize) {
optimizer.RegisterPass(spvtools::CreateRedundancyEliminationPass());
- // TODO(greg-lunarg): Add this when AMD driver issues are resolved
- // optimizer.RegisterPass(CreateCommonUniformElimPass());
}
optimizer.RegisterPass(spvtools::CreateAggressiveDCEPass());
optimizer.RegisterPass(spvtools::CreateCFGCleanupPass());
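
For context on the pass list registered above, a minimal sketch of driving the SPIRV-Tools optimizer with the same ordering (assuming SPIRV-Tools is built and linked; the pass factory functions are real spvtools API, the legalize wrapper itself is illustrative). WrapOpKill runs first so that OpKill inside callees stays valid across the later return-merging and inlining passes:

    #include <cstdint>
    #include <vector>

    #include "spirv-tools/optimizer.hpp"

    // Run a small legalization-style pipeline over a SPIR-V binary in place.
    bool legalize(std::vector<uint32_t>& spirv)
    {
        spvtools::Optimizer optimizer(SPV_ENV_VULKAN_1_2);
        optimizer.RegisterPass(spvtools::CreateWrapOpKillPass());
        optimizer.RegisterPass(spvtools::CreateDeadBranchElimPass());
        optimizer.RegisterPass(spvtools::CreateMergeReturnPass());
        optimizer.RegisterPass(spvtools::CreateInlineExhaustivePass());
        optimizer.RegisterPass(spvtools::CreateAggressiveDCEPass());
        return optimizer.Run(spirv.data(), spirv.size(), &spirv);
    }
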
diff --git a/thirdparty/glslang/SPIRV/SpvTools.h b/thirdparty/glslang/SPIRV/SpvTools.h
index 7e49ae0b30..59c914da0b 100644
--- a/thirdparty/glslang/SPIRV/SpvTools.h
+++ b/thirdparty/glslang/SPIRV/SpvTools.h
@@ -41,10 +41,12 @@
#ifndef GLSLANG_SPV_TOOLS_H
#define GLSLANG_SPV_TOOLS_H
+#ifdef ENABLE_OPT
#include <vector>
#include <ostream>
+#endif
-#include "../glslang/MachineIndependent/localintermediate.h"
+#include "glslang/MachineIndependent/localintermediate.h"
#include "Logger.h"
namespace glslang {
@@ -59,14 +61,14 @@ struct SpvOptions {
bool validate;
};
-#if ENABLE_OPT
+#ifdef ENABLE_OPT
// Use the SPIRV-Tools disassembler to print SPIR-V.
void SpirvToolsDisassemble(std::ostream& out, const std::vector<unsigned int>& spirv);
// Apply the SPIRV-Tools validator to generated SPIR-V.
void SpirvToolsValidate(const glslang::TIntermediate& intermediate, std::vector<unsigned int>& spirv,
- spv::SpvBuildLogger*);
+ spv::SpvBuildLogger*, bool prelegalization);
// Apply the SPIRV-Tools optimizer to generated SPIR-V, for the purpose of
// legalizing HLSL SPIR-V.
diff --git a/thirdparty/glslang/SPIRV/disassemble.cpp b/thirdparty/glslang/SPIRV/disassemble.cpp
index 631173c0ec..930e799493 100644
--- a/thirdparty/glslang/SPIRV/disassemble.cpp
+++ b/thirdparty/glslang/SPIRV/disassemble.cpp
@@ -52,26 +52,16 @@ namespace spv {
extern "C" {
// Include C-based headers that don't have a namespace
#include "GLSL.std.450.h"
-#ifdef AMD_EXTENSIONS
#include "GLSL.ext.AMD.h"
-#endif
-
-#ifdef NV_EXTENSIONS
#include "GLSL.ext.NV.h"
-#endif
}
}
const char* GlslStd450DebugNames[spv::GLSLstd450Count];
namespace spv {
-#ifdef AMD_EXTENSIONS
static const char* GLSLextAMDGetDebugNames(const char*, unsigned);
-#endif
-
-#ifdef NV_EXTENSIONS
static const char* GLSLextNVGetDebugNames(const char*, unsigned);
-#endif
static void Kill(std::ostream& out, const char* message)
{
@@ -82,15 +72,8 @@ static void Kill(std::ostream& out, const char* message)
// used to identify the extended instruction library imported when printing
enum ExtInstSet {
GLSL450Inst,
-
-#ifdef AMD_EXTENSIONS
GLSLextAMDInst,
-#endif
-
-#ifdef NV_EXTENSIONS
GLSLextNVInst,
-#endif
-
OpenCLExtInst,
};
@@ -499,37 +482,29 @@ void SpirvStream::disassembleInstruction(Id resultId, Id /*typeId*/, Op opCode,
const char* name = idDescriptor[stream[word - 2]].c_str();
if (0 == memcmp("OpenCL", name, 6)) {
extInstSet = OpenCLExtInst;
-#ifdef AMD_EXTENSIONS
} else if (strcmp(spv::E_SPV_AMD_shader_ballot, name) == 0 ||
strcmp(spv::E_SPV_AMD_shader_trinary_minmax, name) == 0 ||
strcmp(spv::E_SPV_AMD_shader_explicit_vertex_parameter, name) == 0 ||
strcmp(spv::E_SPV_AMD_gcn_shader, name) == 0) {
extInstSet = GLSLextAMDInst;
-#endif
-#ifdef NV_EXTENSIONS
- }else if (strcmp(spv::E_SPV_NV_sample_mask_override_coverage, name) == 0 ||
+ } else if (strcmp(spv::E_SPV_NV_sample_mask_override_coverage, name) == 0 ||
strcmp(spv::E_SPV_NV_geometry_shader_passthrough, name) == 0 ||
strcmp(spv::E_SPV_NV_viewport_array2, name) == 0 ||
strcmp(spv::E_SPV_NVX_multiview_per_view_attributes, name) == 0 ||
strcmp(spv::E_SPV_NV_fragment_shader_barycentric, name) == 0 ||
strcmp(spv::E_SPV_NV_mesh_shader, name) == 0) {
extInstSet = GLSLextNVInst;
-#endif
}
unsigned entrypoint = stream[word - 1];
if (extInstSet == GLSL450Inst) {
if (entrypoint < GLSLstd450Count) {
out << "(" << GlslStd450DebugNames[entrypoint] << ")";
}
-#ifdef AMD_EXTENSIONS
} else if (extInstSet == GLSLextAMDInst) {
out << "(" << GLSLextAMDGetDebugNames(name, entrypoint) << ")";
-#endif
-#ifdef NV_EXTENSIONS
}
else if (extInstSet == GLSLextNVInst) {
out << "(" << GLSLextNVGetDebugNames(name, entrypoint) << ")";
-#endif
}
}
break;
@@ -648,9 +623,11 @@ static void GLSLstd450GetDebugNames(const char** names)
names[GLSLstd450InterpolateAtCentroid] = "InterpolateAtCentroid";
names[GLSLstd450InterpolateAtSample] = "InterpolateAtSample";
names[GLSLstd450InterpolateAtOffset] = "InterpolateAtOffset";
+ names[GLSLstd450NMin] = "NMin";
+ names[GLSLstd450NMax] = "NMax";
+ names[GLSLstd450NClamp] = "NClamp";
}
-#ifdef AMD_EXTENSIONS
static const char* GLSLextAMDGetDebugNames(const char* name, unsigned entrypoint)
{
if (strcmp(name, spv::E_SPV_AMD_shader_ballot) == 0) {
@@ -692,18 +669,17 @@ static const char* GLSLextAMDGetDebugNames(const char* name, unsigned entrypoint
return "Bad";
}
-#endif
-#ifdef NV_EXTENSIONS
static const char* GLSLextNVGetDebugNames(const char* name, unsigned entrypoint)
{
if (strcmp(name, spv::E_SPV_NV_sample_mask_override_coverage) == 0 ||
strcmp(name, spv::E_SPV_NV_geometry_shader_passthrough) == 0 ||
strcmp(name, spv::E_ARB_shader_viewport_layer_array) == 0 ||
strcmp(name, spv::E_SPV_NV_viewport_array2) == 0 ||
- strcmp(spv::E_SPV_NVX_multiview_per_view_attributes, name) == 0 ||
- strcmp(spv::E_SPV_NV_fragment_shader_barycentric, name) == 0 ||
- strcmp(name, spv::E_SPV_NV_mesh_shader) == 0) {
+ strcmp(name, spv::E_SPV_NVX_multiview_per_view_attributes) == 0 ||
+ strcmp(name, spv::E_SPV_NV_fragment_shader_barycentric) == 0 ||
+ strcmp(name, spv::E_SPV_NV_mesh_shader) == 0 ||
+ strcmp(name, spv::E_SPV_NV_shader_image_footprint) == 0) {
switch (entrypoint) {
// NV builtins
case BuiltInViewportMaskNV: return "ViewportMaskNV";
@@ -729,6 +705,8 @@ static const char* GLSLextNVGetDebugNames(const char* name, unsigned entrypoint)
case CapabilityPerViewAttributesNV: return "PerViewAttributesNV";
case CapabilityFragmentBarycentricNV: return "FragmentBarycentricNV";
case CapabilityMeshShadingNV: return "MeshShadingNV";
+ case CapabilityImageFootprintNV: return "ImageFootprintNV";
+ case CapabilitySampleMaskOverrideCoverageNV: return "SampleMaskOverrideCoverageNV";
// NV Decorations
case DecorationOverrideCoverageNV: return "OverrideCoverageNV";
@@ -745,7 +723,6 @@ static const char* GLSLextNVGetDebugNames(const char* name, unsigned entrypoint)
}
return "Bad";
}
-#endif
void Disassemble(std::ostream& out, const std::vector<unsigned int>& stream)
{
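
With the AMD_EXTENSIONS/NV_EXTENSIONS guards gone, the extended-instruction-set dispatch above is a plain string match on the imported set's name. A condensed standalone sketch of that classification (the spv::E_SPV_* constants are spelled out as their literal extension-name strings; only a subset of the AMD/NV sets is shown):

    #include <cstring>

    enum ExtInstSet { GLSL450Inst, GLSLextAMDInst, GLSLextNVInst, OpenCLExtInst };

    // Classify an OpExtInstImport name the way the disassembler above does:
    // OpenCL by prefix, AMD/NV by exact match, GLSL.std.450 otherwise.
    ExtInstSet classifyImport(const char* name)
    {
        if (std::strncmp(name, "OpenCL", 6) == 0)
            return OpenCLExtInst;
        if (std::strcmp(name, "SPV_AMD_shader_ballot") == 0 ||
            std::strcmp(name, "SPV_AMD_gcn_shader") == 0)
            return GLSLextAMDInst;              // two of the four AMD sets
        if (std::strcmp(name, "SPV_NV_mesh_shader") == 0)
            return GLSLextNVInst;               // one of the six NV sets
        return GLSL450Inst;                     // default: GLSL.std.450
    }
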
diff --git a/thirdparty/glslang/SPIRV/doc.cpp b/thirdparty/glslang/SPIRV/doc.cpp
index 3b85767216..bee5c79729 100644
--- a/thirdparty/glslang/SPIRV/doc.cpp
+++ b/thirdparty/glslang/SPIRV/doc.cpp
@@ -50,12 +50,8 @@ namespace spv {
// Include C-based headers that don't have a namespace
#include "GLSL.ext.KHR.h"
#include "GLSL.ext.EXT.h"
-#ifdef AMD_EXTENSIONS
#include "GLSL.ext.AMD.h"
-#endif
-#ifdef NV_EXTENSIONS
#include "GLSL.ext.NV.h"
-#endif
}
}
@@ -98,22 +94,17 @@ const char* ExecutionModelString(int model)
case 4: return "Fragment";
case 5: return "GLCompute";
case 6: return "Kernel";
-#ifdef NV_EXTENSIONS
case ExecutionModelTaskNV: return "TaskNV";
case ExecutionModelMeshNV: return "MeshNV";
-#endif
default: return "Bad";
-#ifdef NV_EXTENSIONS
case ExecutionModelRayGenerationNV: return "RayGenerationNV";
case ExecutionModelIntersectionNV: return "IntersectionNV";
case ExecutionModelAnyHitNV: return "AnyHitNV";
case ExecutionModelClosestHitNV: return "ClosestHitNV";
case ExecutionModelMissNV: return "MissNV";
case ExecutionModelCallableNV: return "CallableNV";
-#endif
-
}
}
@@ -183,13 +174,18 @@ const char* ExecutionModeString(int mode)
case 4446: return "PostDepthCoverage";
-#ifdef NV_EXTENSIONS
case ExecutionModeOutputLinesNV: return "OutputLinesNV";
case ExecutionModeOutputPrimitivesNV: return "OutputPrimitivesNV";
case ExecutionModeOutputTrianglesNV: return "OutputTrianglesNV";
case ExecutionModeDerivativeGroupQuadsNV: return "DerivativeGroupQuadsNV";
case ExecutionModeDerivativeGroupLinearNV: return "DerivativeGroupLinearNV";
-#endif
+
+ case ExecutionModePixelInterlockOrderedEXT: return "PixelInterlockOrderedEXT";
+ case ExecutionModePixelInterlockUnorderedEXT: return "PixelInterlockUnorderedEXT";
+ case ExecutionModeSampleInterlockOrderedEXT: return "SampleInterlockOrderedEXT";
+ case ExecutionModeSampleInterlockUnorderedEXT: return "SampleInterlockUnorderedEXT";
+ case ExecutionModeShadingRateInterlockOrderedEXT: return "ShadingRateInterlockOrderedEXT";
+ case ExecutionModeShadingRateInterlockUnorderedEXT: return "ShadingRateInterlockUnorderedEXT";
case ExecutionModeCeiling:
default: return "Bad";
@@ -213,14 +209,12 @@ const char* StorageClassString(int StorageClass)
case 11: return "Image";
case 12: return "StorageBuffer";
-#ifdef NV_EXTENSIONS
case StorageClassRayPayloadNV: return "RayPayloadNV";
case StorageClassHitAttributeNV: return "HitAttributeNV";
case StorageClassIncomingRayPayloadNV: return "IncomingRayPayloadNV";
case StorageClassShaderRecordBufferNV: return "ShaderRecordBufferNV";
case StorageClassCallableDataNV: return "CallableDataNV";
case StorageClassIncomingCallableDataNV: return "IncomingCallableDataNV";
-#endif
case StorageClassPhysicalStorageBufferEXT: return "PhysicalStorageBufferEXT";
@@ -282,10 +276,7 @@ const char* DecorationString(int decoration)
case DecorationCeiling:
default: return "Bad";
-#ifdef AMD_EXTENSIONS
case DecorationExplicitInterpAMD: return "ExplicitInterpAMD";
-#endif
-#ifdef NV_EXTENSIONS
case DecorationOverrideCoverageNV: return "OverrideCoverageNV";
case DecorationPassthroughNV: return "PassthroughNV";
case DecorationViewportRelativeNV: return "ViewportRelativeNV";
@@ -294,7 +285,6 @@ const char* DecorationString(int decoration)
case DecorationPerViewNV: return "PerViewNV";
case DecorationPerTaskNV: return "PerTaskNV";
case DecorationPerVertexNV: return "PerVertexNV";
-#endif
case DecorationNonUniformEXT: return "DecorationNonUniformEXT";
case DecorationHlslCounterBufferGOOGLE: return "DecorationHlslCounterBufferGOOGLE";
@@ -364,7 +354,6 @@ const char* BuiltInString(int builtIn)
case 4426: return "DrawIndex";
case 5014: return "FragStencilRefEXT";
-#ifdef AMD_EXTENSIONS
case 4992: return "BaryCoordNoPerspAMD";
case 4993: return "BaryCoordNoPerspCentroidAMD";
case 4994: return "BaryCoordNoPerspSampleAMD";
@@ -372,9 +361,6 @@ const char* BuiltInString(int builtIn)
case 4996: return "BaryCoordSmoothCentroidAMD";
case 4997: return "BaryCoordSmoothSampleAMD";
case 4998: return "BaryCoordPullModelAMD";
-#endif
-
-#ifdef NV_EXTENSIONS
case BuiltInLaunchIdNV: return "LaunchIdNV";
case BuiltInLaunchSizeNV: return "LaunchSizeNV";
case BuiltInWorldRayOriginNV: return "WorldRayOriginNV";
@@ -398,15 +384,12 @@ const char* BuiltInString(int builtIn)
// case BuiltInInvocationsPerPixelNV: return "InvocationsPerPixelNV"; // superseded by BuiltInFragInvocationCountEXT
case BuiltInBaryCoordNV: return "BaryCoordNV";
case BuiltInBaryCoordNoPerspNV: return "BaryCoordNoPerspNV";
-#endif
case BuiltInFragSizeEXT: return "FragSizeEXT";
case BuiltInFragInvocationCountEXT: return "FragInvocationCountEXT";
case 5264: return "FullyCoveredEXT";
-
-#ifdef NV_EXTENSIONS
case BuiltInTaskCountNV: return "TaskCountNV";
case BuiltInPrimitiveCountNV: return "PrimitiveCountNV";
case BuiltInPrimitiveIndicesNV: return "PrimitiveIndicesNV";
@@ -415,7 +398,10 @@ const char* BuiltInString(int builtIn)
case BuiltInLayerPerViewNV: return "LayerPerViewNV";
case BuiltInMeshViewCountNV: return "MeshViewCountNV";
case BuiltInMeshViewIndicesNV: return "MeshViewIndicesNV";
-#endif
+ case BuiltInWarpsPerSMNV: return "WarpsPerSMNV";
+ case BuiltInSMCountNV: return "SMCountNV";
+ case BuiltInWarpIDNV: return "WarpIDNV";
+ case BuiltInSMIDNV: return "SMIDNV";
default: return "Bad";
}
@@ -770,11 +756,9 @@ const char* GroupOperationString(int gop)
case GroupOperationInclusiveScan: return "InclusiveScan";
case GroupOperationExclusiveScan: return "ExclusiveScan";
case GroupOperationClusteredReduce: return "ClusteredReduce";
-#ifdef NV_EXTENSIONS
case GroupOperationPartitionedReduceNV: return "PartitionedReduceNV";
case GroupOperationPartitionedInclusiveScanNV: return "PartitionedInclusiveScanNV";
case GroupOperationPartitionedExclusiveScanNV: return "PartitionedExclusiveScanNV";
-#endif
default: return "Bad";
}
@@ -882,26 +866,23 @@ const char* CapabilityString(int info)
case CapabilityStoragePushConstant16: return "StoragePushConstant16";
case CapabilityStorageInputOutput16: return "StorageInputOutput16";
- case CapabilityStorageBuffer8BitAccess: return "CapabilityStorageBuffer8BitAccess";
- case CapabilityUniformAndStorageBuffer8BitAccess: return "CapabilityUniformAndStorageBuffer8BitAccess";
- case CapabilityStoragePushConstant8: return "CapabilityStoragePushConstant8";
+ case CapabilityStorageBuffer8BitAccess: return "StorageBuffer8BitAccess";
+ case CapabilityUniformAndStorageBuffer8BitAccess: return "UniformAndStorageBuffer8BitAccess";
+ case CapabilityStoragePushConstant8: return "StoragePushConstant8";
case CapabilityDeviceGroup: return "DeviceGroup";
case CapabilityMultiView: return "MultiView";
case CapabilityStencilExportEXT: return "StencilExportEXT";
-#ifdef AMD_EXTENSIONS
case CapabilityFloat16ImageAMD: return "Float16ImageAMD";
case CapabilityImageGatherBiasLodAMD: return "ImageGatherBiasLodAMD";
case CapabilityFragmentMaskAMD: return "FragmentMaskAMD";
case CapabilityImageReadWriteLodAMD: return "ImageReadWriteLodAMD";
-#endif
case CapabilityAtomicStorageOps: return "AtomicStorageOps";
case CapabilitySampleMaskPostDepthCoverage: return "SampleMaskPostDepthCoverage";
-#ifdef NV_EXTENSIONS
case CapabilityGeometryShaderPassthroughNV: return "GeometryShaderPassthroughNV";
case CapabilityShaderViewportIndexLayerNV: return "ShaderViewportIndexLayerNV";
case CapabilityShaderViewportMaskNV: return "ShaderViewportMaskNV";
@@ -913,33 +894,44 @@ const char* CapabilityString(int info)
case CapabilityComputeDerivativeGroupLinearNV: return "ComputeDerivativeGroupLinearNV";
case CapabilityFragmentBarycentricNV: return "FragmentBarycentricNV";
case CapabilityMeshShadingNV: return "MeshShadingNV";
-// case CapabilityShadingRateNV: return "ShadingRateNV"; // superseded by CapabilityFragmentDensityEXT
-#endif
+ case CapabilityImageFootprintNV: return "ImageFootprintNV";
+// case CapabilityShadingRateNV: return "ShadingRateNV"; // superseded by FragmentDensityEXT
+ case CapabilitySampleMaskOverrideCoverageNV: return "SampleMaskOverrideCoverageNV";
case CapabilityFragmentDensityEXT: return "FragmentDensityEXT";
case CapabilityFragmentFullyCoveredEXT: return "FragmentFullyCoveredEXT";
- case CapabilityShaderNonUniformEXT: return "CapabilityShaderNonUniformEXT";
- case CapabilityRuntimeDescriptorArrayEXT: return "CapabilityRuntimeDescriptorArrayEXT";
- case CapabilityInputAttachmentArrayDynamicIndexingEXT: return "CapabilityInputAttachmentArrayDynamicIndexingEXT";
- case CapabilityUniformTexelBufferArrayDynamicIndexingEXT: return "CapabilityUniformTexelBufferArrayDynamicIndexingEXT";
- case CapabilityStorageTexelBufferArrayDynamicIndexingEXT: return "CapabilityStorageTexelBufferArrayDynamicIndexingEXT";
- case CapabilityUniformBufferArrayNonUniformIndexingEXT: return "CapabilityUniformBufferArrayNonUniformIndexingEXT";
- case CapabilitySampledImageArrayNonUniformIndexingEXT: return "CapabilitySampledImageArrayNonUniformIndexingEXT";
- case CapabilityStorageBufferArrayNonUniformIndexingEXT: return "CapabilityStorageBufferArrayNonUniformIndexingEXT";
- case CapabilityStorageImageArrayNonUniformIndexingEXT: return "CapabilityStorageImageArrayNonUniformIndexingEXT";
- case CapabilityInputAttachmentArrayNonUniformIndexingEXT: return "CapabilityInputAttachmentArrayNonUniformIndexingEXT";
- case CapabilityUniformTexelBufferArrayNonUniformIndexingEXT: return "CapabilityUniformTexelBufferArrayNonUniformIndexingEXT";
- case CapabilityStorageTexelBufferArrayNonUniformIndexingEXT: return "CapabilityStorageTexelBufferArrayNonUniformIndexingEXT";
+ case CapabilityShaderNonUniformEXT: return "ShaderNonUniformEXT";
+ case CapabilityRuntimeDescriptorArrayEXT: return "RuntimeDescriptorArrayEXT";
+ case CapabilityInputAttachmentArrayDynamicIndexingEXT: return "InputAttachmentArrayDynamicIndexingEXT";
+ case CapabilityUniformTexelBufferArrayDynamicIndexingEXT: return "UniformTexelBufferArrayDynamicIndexingEXT";
+ case CapabilityStorageTexelBufferArrayDynamicIndexingEXT: return "StorageTexelBufferArrayDynamicIndexingEXT";
+ case CapabilityUniformBufferArrayNonUniformIndexingEXT: return "UniformBufferArrayNonUniformIndexingEXT";
+ case CapabilitySampledImageArrayNonUniformIndexingEXT: return "SampledImageArrayNonUniformIndexingEXT";
+ case CapabilityStorageBufferArrayNonUniformIndexingEXT: return "StorageBufferArrayNonUniformIndexingEXT";
+ case CapabilityStorageImageArrayNonUniformIndexingEXT: return "StorageImageArrayNonUniformIndexingEXT";
+ case CapabilityInputAttachmentArrayNonUniformIndexingEXT: return "InputAttachmentArrayNonUniformIndexingEXT";
+ case CapabilityUniformTexelBufferArrayNonUniformIndexingEXT: return "UniformTexelBufferArrayNonUniformIndexingEXT";
+ case CapabilityStorageTexelBufferArrayNonUniformIndexingEXT: return "StorageTexelBufferArrayNonUniformIndexingEXT";
+
+ case CapabilityVulkanMemoryModelKHR: return "VulkanMemoryModelKHR";
+ case CapabilityVulkanMemoryModelDeviceScopeKHR: return "VulkanMemoryModelDeviceScopeKHR";
+
+ case CapabilityPhysicalStorageBufferAddressesEXT: return "PhysicalStorageBufferAddressesEXT";
- case CapabilityVulkanMemoryModelKHR: return "CapabilityVulkanMemoryModelKHR";
- case CapabilityVulkanMemoryModelDeviceScopeKHR: return "CapabilityVulkanMemoryModelDeviceScopeKHR";
+ case CapabilityVariablePointers: return "VariablePointers";
- case CapabilityPhysicalStorageBufferAddressesEXT: return "CapabilityPhysicalStorageBufferAddressesEXT";
+ case CapabilityCooperativeMatrixNV: return "CooperativeMatrixNV";
+ case CapabilityShaderSMBuiltinsNV: return "ShaderSMBuiltinsNV";
- case CapabilityVariablePointers: return "CapabilityVariablePointers";
+ case CapabilityFragmentShaderSampleInterlockEXT: return "CapabilityFragmentShaderSampleInterlockEXT";
+ case CapabilityFragmentShaderPixelInterlockEXT: return "CapabilityFragmentShaderPixelInterlockEXT";
+ case CapabilityFragmentShaderShadingRateInterlockEXT: return "CapabilityFragmentShaderShadingRateInterlockEXT";
- case CapabilityCooperativeMatrixNV: return "CapabilityCooperativeMatrixNV";
+ case CapabilityDemoteToHelperInvocationEXT: return "DemoteToHelperInvocationEXT";
+ case CapabilityShaderClockKHR: return "ShaderClockKHR";
+
+ case CapabilityIntegerFunctions2INTEL: return "CapabilityIntegerFunctions2INTEL";
default: return "Bad";
}
@@ -1316,7 +1308,6 @@ const char* OpcodeString(int op)
case 4430: return "OpSubgroupAllEqualKHR";
case 4432: return "OpSubgroupReadInvocationKHR";
-#ifdef AMD_EXTENSIONS
case 5000: return "OpGroupIAddNonUniformAMD";
case 5001: return "OpGroupFAddNonUniformAMD";
case 5002: return "OpGroupFMinNonUniformAMD";
@@ -1328,12 +1319,12 @@ const char* OpcodeString(int op)
case 5011: return "OpFragmentMaskFetchAMD";
case 5012: return "OpFragmentFetchAMD";
-#endif
+
+ case OpReadClockKHR: return "OpReadClockKHR";
case OpDecorateStringGOOGLE: return "OpDecorateStringGOOGLE";
case OpMemberDecorateStringGOOGLE: return "OpMemberDecorateStringGOOGLE";
-#ifdef NV_EXTENSIONS
case OpGroupNonUniformPartitionNV: return "OpGroupNonUniformPartitionNV";
case OpReportIntersectionNV: return "OpReportIntersectionNV";
case OpIgnoreIntersectionNV: return "OpIgnoreIntersectionNV";
@@ -1343,13 +1334,17 @@ const char* OpcodeString(int op)
case OpExecuteCallableNV: return "OpExecuteCallableNV";
case OpImageSampleFootprintNV: return "OpImageSampleFootprintNV";
case OpWritePackedPrimitiveIndices4x8NV: return "OpWritePackedPrimitiveIndices4x8NV";
-#endif
case OpTypeCooperativeMatrixNV: return "OpTypeCooperativeMatrixNV";
case OpCooperativeMatrixLoadNV: return "OpCooperativeMatrixLoadNV";
case OpCooperativeMatrixStoreNV: return "OpCooperativeMatrixStoreNV";
case OpCooperativeMatrixMulAddNV: return "OpCooperativeMatrixMulAddNV";
case OpCooperativeMatrixLengthNV: return "OpCooperativeMatrixLengthNV";
+ case OpDemoteToHelperInvocationEXT: return "OpDemoteToHelperInvocationEXT";
+ case OpIsHelperInvocationEXT: return "OpIsHelperInvocationEXT";
+
+ case OpBeginInvocationInterlockEXT: return "OpBeginInvocationInterlockEXT";
+ case OpEndInvocationInterlockEXT: return "OpEndInvocationInterlockEXT";
default:
return "Bad";
@@ -1464,6 +1459,8 @@ void Parameterize()
InstructionDesc[OpModuleProcessed].setResultAndType(false, false);
InstructionDesc[OpTypeCooperativeMatrixNV].setResultAndType(true, false);
InstructionDesc[OpCooperativeMatrixStoreNV].setResultAndType(false, false);
+ InstructionDesc[OpBeginInvocationInterlockEXT].setResultAndType(false, false);
+ InstructionDesc[OpEndInvocationInterlockEXT].setResultAndType(false, false);
// Specific additional context-dependent operands
@@ -2656,7 +2653,6 @@ void Parameterize()
InstructionDesc[OpModuleProcessed].operands.push(OperandLiteralString, "'process'");
-#ifdef AMD_EXTENSIONS
InstructionDesc[OpGroupIAddNonUniformAMD].operands.push(OperandScope, "'Execution'");
InstructionDesc[OpGroupIAddNonUniformAMD].operands.push(OperandGroupOperation, "'Operation'");
InstructionDesc[OpGroupIAddNonUniformAMD].operands.push(OperandId, "'X'");
@@ -2695,9 +2691,7 @@ void Parameterize()
InstructionDesc[OpFragmentFetchAMD].operands.push(OperandId, "'Image'");
InstructionDesc[OpFragmentFetchAMD].operands.push(OperandId, "'Coordinate'");
InstructionDesc[OpFragmentFetchAMD].operands.push(OperandId, "'Fragment Index'");
-#endif
-#ifdef NV_EXTENSIONS
InstructionDesc[OpGroupNonUniformPartitionNV].operands.push(OperandId, "X");
InstructionDesc[OpTypeAccelerationStructureNV].setResultAndType(true, false);
@@ -2735,7 +2729,6 @@ void Parameterize()
InstructionDesc[OpWritePackedPrimitiveIndices4x8NV].operands.push(OperandId, "'Index Offset'");
InstructionDesc[OpWritePackedPrimitiveIndices4x8NV].operands.push(OperandId, "'Packed Indices'");
-#endif
InstructionDesc[OpTypeCooperativeMatrixNV].operands.push(OperandId, "'Component Type'");
InstructionDesc[OpTypeCooperativeMatrixNV].operands.push(OperandId, "'Scope'");
@@ -2762,6 +2755,10 @@ void Parameterize()
InstructionDesc[OpCooperativeMatrixMulAddNV].operands.push(OperandId, "'C'");
InstructionDesc[OpCooperativeMatrixLengthNV].operands.push(OperandId, "'Type'");
+
+ InstructionDesc[OpDemoteToHelperInvocationEXT].setResultAndType(false, false);
+
+ InstructionDesc[OpReadClockKHR].operands.push(OperandScope, "'Scope'");
}
}; // end spv namespace
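
The Parameterize() additions above follow doc.cpp's table-driven scheme: each opcode gets a result/result-type flag pair plus an ordered operand list that the disassembler walks when decoding a word stream. A toy model of that table (opcode values copied from spirv.hpp; the surrounding types are illustrative, not glslang's):

    #include <map>
    #include <string>
    #include <utility>
    #include <vector>

    enum OperandKind { OperandId, OperandScope, OperandLiteralString };

    // Minimal stand-in for doc.cpp's InstructionParameters entries.
    struct InstructionParameters {
        bool hasResult = true;
        bool hasType = true;
        std::vector<std::pair<OperandKind, std::string>> operands;
    };

    int main()
    {
        std::map<int, InstructionParameters> instructionDesc;

        const int OpReadClockKHR = 5056;  // opcode value from spirv.hpp
        instructionDesc[OpReadClockKHR].operands.push_back({OperandScope, "'Scope'"});

        const int OpBeginInvocationInterlockEXT = 5364;
        instructionDesc[OpBeginInvocationInterlockEXT].hasResult = false;  // no result id
        instructionDesc[OpBeginInvocationInterlockEXT].hasType = false;    // no result type
    }
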
diff --git a/thirdparty/glslang/SPIRV/spirv.hpp b/thirdparty/glslang/SPIRV/spirv.hpp
index 5297fd3902..1e96f7b4a9 100644
--- a/thirdparty/glslang/SPIRV/spirv.hpp
+++ b/thirdparty/glslang/SPIRV/spirv.hpp
@@ -91,6 +91,7 @@ enum AddressingModel {
AddressingModelLogical = 0,
AddressingModelPhysical32 = 1,
AddressingModelPhysical64 = 2,
+ AddressingModelPhysicalStorageBuffer64 = 5348,
AddressingModelPhysicalStorageBuffer64EXT = 5348,
AddressingModelMax = 0x7fffffff,
};
@@ -99,6 +100,7 @@ enum MemoryModel {
MemoryModelSimple = 0,
MemoryModelGLSL450 = 1,
MemoryModelOpenCL = 2,
+ MemoryModelVulkan = 3,
MemoryModelVulkanKHR = 3,
MemoryModelMax = 0x7fffffff,
};
@@ -154,6 +156,12 @@ enum ExecutionMode {
ExecutionModeDerivativeGroupQuadsNV = 5289,
ExecutionModeDerivativeGroupLinearNV = 5290,
ExecutionModeOutputTrianglesNV = 5298,
+ ExecutionModePixelInterlockOrderedEXT = 5366,
+ ExecutionModePixelInterlockUnorderedEXT = 5367,
+ ExecutionModeSampleInterlockOrderedEXT = 5368,
+ ExecutionModeSampleInterlockUnorderedEXT = 5369,
+ ExecutionModeShadingRateInterlockOrderedEXT = 5370,
+ ExecutionModeShadingRateInterlockUnorderedEXT = 5371,
ExecutionModeMax = 0x7fffffff,
};
@@ -177,6 +185,7 @@ enum StorageClass {
StorageClassHitAttributeNV = 5339,
StorageClassIncomingRayPayloadNV = 5342,
StorageClassShaderRecordBufferNV = 5343,
+ StorageClassPhysicalStorageBuffer = 5349,
StorageClassPhysicalStorageBufferEXT = 5349,
StorageClassMax = 0x7fffffff,
};
@@ -305,9 +314,13 @@ enum ImageOperandsShift {
ImageOperandsConstOffsetsShift = 5,
ImageOperandsSampleShift = 6,
ImageOperandsMinLodShift = 7,
+ ImageOperandsMakeTexelAvailableShift = 8,
ImageOperandsMakeTexelAvailableKHRShift = 8,
+ ImageOperandsMakeTexelVisibleShift = 9,
ImageOperandsMakeTexelVisibleKHRShift = 9,
+ ImageOperandsNonPrivateTexelShift = 10,
ImageOperandsNonPrivateTexelKHRShift = 10,
+ ImageOperandsVolatileTexelShift = 11,
ImageOperandsVolatileTexelKHRShift = 11,
ImageOperandsSignExtendShift = 12,
ImageOperandsZeroExtendShift = 13,
@@ -324,9 +337,13 @@ enum ImageOperandsMask {
ImageOperandsConstOffsetsMask = 0x00000020,
ImageOperandsSampleMask = 0x00000040,
ImageOperandsMinLodMask = 0x00000080,
+ ImageOperandsMakeTexelAvailableMask = 0x00000100,
ImageOperandsMakeTexelAvailableKHRMask = 0x00000100,
+ ImageOperandsMakeTexelVisibleMask = 0x00000200,
ImageOperandsMakeTexelVisibleKHRMask = 0x00000200,
+ ImageOperandsNonPrivateTexelMask = 0x00000400,
ImageOperandsNonPrivateTexelKHRMask = 0x00000400,
+ ImageOperandsVolatileTexelMask = 0x00000800,
ImageOperandsVolatileTexelKHRMask = 0x00000800,
ImageOperandsSignExtendMask = 0x00001000,
ImageOperandsZeroExtendMask = 0x00002000,
@@ -442,13 +459,17 @@ enum Decoration {
DecorationPerViewNV = 5272,
DecorationPerTaskNV = 5273,
DecorationPerVertexNV = 5285,
+ DecorationNonUniform = 5300,
DecorationNonUniformEXT = 5300,
+ DecorationRestrictPointer = 5355,
DecorationRestrictPointerEXT = 5355,
+ DecorationAliasedPointer = 5356,
DecorationAliasedPointerEXT = 5356,
DecorationCounterBuffer = 5634,
DecorationHlslCounterBufferGOOGLE = 5634,
DecorationHlslSemanticGOOGLE = 5635,
DecorationUserSemantic = 5635,
+ DecorationUserTypeGOOGLE = 5636,
DecorationMax = 0x7fffffff,
};
@@ -551,6 +572,10 @@ enum BuiltIn {
BuiltInHitTNV = 5332,
BuiltInHitKindNV = 5333,
BuiltInIncomingRayFlagsNV = 5351,
+ BuiltInWarpsPerSMNV = 5374,
+ BuiltInSMCountNV = 5375,
+ BuiltInWarpIDNV = 5376,
+ BuiltInSMIDNV = 5377,
BuiltInMax = 0x7fffffff,
};
@@ -619,9 +644,13 @@ enum MemorySemanticsShift {
MemorySemanticsCrossWorkgroupMemoryShift = 9,
MemorySemanticsAtomicCounterMemoryShift = 10,
MemorySemanticsImageMemoryShift = 11,
+ MemorySemanticsOutputMemoryShift = 12,
MemorySemanticsOutputMemoryKHRShift = 12,
+ MemorySemanticsMakeAvailableShift = 13,
MemorySemanticsMakeAvailableKHRShift = 13,
+ MemorySemanticsMakeVisibleShift = 14,
MemorySemanticsMakeVisibleKHRShift = 14,
+ MemorySemanticsVolatileShift = 15,
MemorySemanticsMax = 0x7fffffff,
};
@@ -637,17 +666,24 @@ enum MemorySemanticsMask {
MemorySemanticsCrossWorkgroupMemoryMask = 0x00000200,
MemorySemanticsAtomicCounterMemoryMask = 0x00000400,
MemorySemanticsImageMemoryMask = 0x00000800,
+ MemorySemanticsOutputMemoryMask = 0x00001000,
MemorySemanticsOutputMemoryKHRMask = 0x00001000,
+ MemorySemanticsMakeAvailableMask = 0x00002000,
MemorySemanticsMakeAvailableKHRMask = 0x00002000,
+ MemorySemanticsMakeVisibleMask = 0x00004000,
MemorySemanticsMakeVisibleKHRMask = 0x00004000,
+ MemorySemanticsVolatileMask = 0x00008000,
};
enum MemoryAccessShift {
MemoryAccessVolatileShift = 0,
MemoryAccessAlignedShift = 1,
MemoryAccessNontemporalShift = 2,
+ MemoryAccessMakePointerAvailableShift = 3,
MemoryAccessMakePointerAvailableKHRShift = 3,
+ MemoryAccessMakePointerVisibleShift = 4,
MemoryAccessMakePointerVisibleKHRShift = 4,
+ MemoryAccessNonPrivatePointerShift = 5,
MemoryAccessNonPrivatePointerKHRShift = 5,
MemoryAccessMax = 0x7fffffff,
};
@@ -657,8 +693,11 @@ enum MemoryAccessMask {
MemoryAccessVolatileMask = 0x00000001,
MemoryAccessAlignedMask = 0x00000002,
MemoryAccessNontemporalMask = 0x00000004,
+ MemoryAccessMakePointerAvailableMask = 0x00000008,
MemoryAccessMakePointerAvailableKHRMask = 0x00000008,
+ MemoryAccessMakePointerVisibleMask = 0x00000010,
MemoryAccessMakePointerVisibleKHRMask = 0x00000010,
+ MemoryAccessNonPrivatePointerMask = 0x00000020,
MemoryAccessNonPrivatePointerKHRMask = 0x00000020,
};
@@ -668,6 +707,7 @@ enum Scope {
ScopeWorkgroup = 2,
ScopeSubgroup = 3,
ScopeInvocation = 4,
+ ScopeQueueFamily = 5,
ScopeQueueFamilyKHR = 5,
ScopeMax = 0x7fffffff,
};
@@ -768,6 +808,8 @@ enum Capability {
CapabilityGroupNonUniformShuffleRelative = 66,
CapabilityGroupNonUniformClustered = 67,
CapabilityGroupNonUniformQuad = 68,
+ CapabilityShaderLayer = 69,
+ CapabilityShaderViewportIndex = 70,
CapabilitySubgroupBallotKHR = 4423,
CapabilityDrawParameters = 4427,
CapabilitySubgroupVoteKHR = 4431,
@@ -796,6 +838,7 @@ enum Capability {
CapabilityFragmentMaskAMD = 5010,
CapabilityStencilExportEXT = 5013,
CapabilityImageReadWriteLodAMD = 5015,
+ CapabilityShaderClockKHR = 5055,
CapabilitySampleMaskOverrideCoverageNV = 5249,
CapabilityGeometryShaderPassthroughNV = 5251,
CapabilityShaderViewportIndexLayerEXT = 5254,
@@ -811,28 +854,49 @@ enum Capability {
CapabilityFragmentDensityEXT = 5291,
CapabilityShadingRateNV = 5291,
CapabilityGroupNonUniformPartitionedNV = 5297,
+ CapabilityShaderNonUniform = 5301,
CapabilityShaderNonUniformEXT = 5301,
+ CapabilityRuntimeDescriptorArray = 5302,
CapabilityRuntimeDescriptorArrayEXT = 5302,
+ CapabilityInputAttachmentArrayDynamicIndexing = 5303,
CapabilityInputAttachmentArrayDynamicIndexingEXT = 5303,
+ CapabilityUniformTexelBufferArrayDynamicIndexing = 5304,
CapabilityUniformTexelBufferArrayDynamicIndexingEXT = 5304,
+ CapabilityStorageTexelBufferArrayDynamicIndexing = 5305,
CapabilityStorageTexelBufferArrayDynamicIndexingEXT = 5305,
+ CapabilityUniformBufferArrayNonUniformIndexing = 5306,
CapabilityUniformBufferArrayNonUniformIndexingEXT = 5306,
+ CapabilitySampledImageArrayNonUniformIndexing = 5307,
CapabilitySampledImageArrayNonUniformIndexingEXT = 5307,
+ CapabilityStorageBufferArrayNonUniformIndexing = 5308,
CapabilityStorageBufferArrayNonUniformIndexingEXT = 5308,
+ CapabilityStorageImageArrayNonUniformIndexing = 5309,
CapabilityStorageImageArrayNonUniformIndexingEXT = 5309,
+ CapabilityInputAttachmentArrayNonUniformIndexing = 5310,
CapabilityInputAttachmentArrayNonUniformIndexingEXT = 5310,
+ CapabilityUniformTexelBufferArrayNonUniformIndexing = 5311,
CapabilityUniformTexelBufferArrayNonUniformIndexingEXT = 5311,
+ CapabilityStorageTexelBufferArrayNonUniformIndexing = 5312,
CapabilityStorageTexelBufferArrayNonUniformIndexingEXT = 5312,
CapabilityRayTracingNV = 5340,
+ CapabilityVulkanMemoryModel = 5345,
CapabilityVulkanMemoryModelKHR = 5345,
+ CapabilityVulkanMemoryModelDeviceScope = 5346,
CapabilityVulkanMemoryModelDeviceScopeKHR = 5346,
+ CapabilityPhysicalStorageBufferAddresses = 5347,
CapabilityPhysicalStorageBufferAddressesEXT = 5347,
CapabilityComputeDerivativeGroupLinearNV = 5350,
CapabilityCooperativeMatrixNV = 5357,
+ CapabilityFragmentShaderSampleInterlockEXT = 5363,
+ CapabilityFragmentShaderShadingRateInterlockEXT = 5372,
+ CapabilityShaderSMBuiltinsNV = 5373,
+ CapabilityFragmentShaderPixelInterlockEXT = 5378,
+ CapabilityDemoteToHelperInvocationEXT = 5379,
CapabilitySubgroupShuffleINTEL = 5568,
CapabilitySubgroupBufferBlockIOINTEL = 5569,
CapabilitySubgroupImageBlockIOINTEL = 5570,
CapabilitySubgroupImageMediaBlockIOINTEL = 5579,
+ CapabilityIntegerFunctions2INTEL = 5584,
CapabilitySubgroupAvcMotionEstimationINTEL = 5696,
CapabilitySubgroupAvcMotionEstimationIntraINTEL = 5697,
CapabilitySubgroupAvcMotionEstimationChromaINTEL = 5698,
@@ -1200,6 +1264,7 @@ enum Op {
OpGroupSMaxNonUniformAMD = 5007,
OpFragmentMaskFetchAMD = 5011,
OpFragmentFetchAMD = 5012,
+ OpReadClockKHR = 5056,
OpImageSampleFootprintNV = 5283,
OpGroupNonUniformPartitionNV = 5296,
OpWritePackedPrimitiveIndices4x8NV = 5299,
@@ -1214,6 +1279,10 @@ enum Op {
OpCooperativeMatrixStoreNV = 5360,
OpCooperativeMatrixMulAddNV = 5361,
OpCooperativeMatrixLengthNV = 5362,
+ OpBeginInvocationInterlockEXT = 5364,
+ OpEndInvocationInterlockEXT = 5365,
+ OpDemoteToHelperInvocationEXT = 5380,
+ OpIsHelperInvocationEXT = 5381,
OpSubgroupShuffleINTEL = 5571,
OpSubgroupShuffleDownINTEL = 5572,
OpSubgroupShuffleUpINTEL = 5573,
@@ -1224,6 +1293,20 @@ enum Op {
OpSubgroupImageBlockWriteINTEL = 5578,
OpSubgroupImageMediaBlockReadINTEL = 5580,
OpSubgroupImageMediaBlockWriteINTEL = 5581,
+ OpUCountLeadingZerosINTEL = 5585,
+ OpUCountTrailingZerosINTEL = 5586,
+ OpAbsISubINTEL = 5587,
+ OpAbsUSubINTEL = 5588,
+ OpIAddSatINTEL = 5589,
+ OpUAddSatINTEL = 5590,
+ OpIAverageINTEL = 5591,
+ OpUAverageINTEL = 5592,
+ OpIAverageRoundedINTEL = 5593,
+ OpUAverageRoundedINTEL = 5594,
+ OpISubSatINTEL = 5595,
+ OpUSubSatINTEL = 5596,
+ OpIMul32x16INTEL = 5597,
+ OpUMul32x16INTEL = 5598,
OpDecorateString = 5632,
OpDecorateStringGOOGLE = 5632,
OpMemberDecorateString = 5633,
@@ -1714,6 +1797,7 @@ inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) {
case OpGroupSMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break;
case OpFragmentMaskFetchAMD: *hasResult = true; *hasResultType = true; break;
case OpFragmentFetchAMD: *hasResult = true; *hasResultType = true; break;
+ case OpReadClockKHR: *hasResult = true; *hasResultType = true; break;
case OpImageSampleFootprintNV: *hasResult = true; *hasResultType = true; break;
case OpGroupNonUniformPartitionNV: *hasResult = true; *hasResultType = true; break;
case OpWritePackedPrimitiveIndices4x8NV: *hasResult = false; *hasResultType = false; break;
@@ -1728,6 +1812,10 @@ inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) {
case OpCooperativeMatrixStoreNV: *hasResult = false; *hasResultType = false; break;
case OpCooperativeMatrixMulAddNV: *hasResult = true; *hasResultType = true; break;
case OpCooperativeMatrixLengthNV: *hasResult = true; *hasResultType = true; break;
+ case OpBeginInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break;
+ case OpEndInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break;
+ case OpDemoteToHelperInvocationEXT: *hasResult = false; *hasResultType = false; break;
+ case OpIsHelperInvocationEXT: *hasResult = true; *hasResultType = true; break;
case OpSubgroupShuffleINTEL: *hasResult = true; *hasResultType = true; break;
case OpSubgroupShuffleDownINTEL: *hasResult = true; *hasResultType = true; break;
case OpSubgroupShuffleUpINTEL: *hasResult = true; *hasResultType = true; break;
@@ -1738,10 +1826,22 @@ inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) {
case OpSubgroupImageBlockWriteINTEL: *hasResult = false; *hasResultType = false; break;
case OpSubgroupImageMediaBlockReadINTEL: *hasResult = true; *hasResultType = true; break;
case OpSubgroupImageMediaBlockWriteINTEL: *hasResult = false; *hasResultType = false; break;
+ case OpUCountLeadingZerosINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpUCountTrailingZerosINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpAbsISubINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpAbsUSubINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpIAddSatINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpUAddSatINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpIAverageINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpUAverageINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpIAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpUAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpISubSatINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpUSubSatINTEL: *hasResult = true; *hasResultType = true; break;
+ case OpIMul32x16INTEL: *hasResult = true; *hasResultType = true; break;
+ case OpUMul32x16INTEL: *hasResult = true; *hasResultType = true; break;
case OpDecorateString: *hasResult = false; *hasResultType = false; break;
- case OpDecorateStringGOOGLE: *hasResult = false; *hasResultType = false; break;
case OpMemberDecorateString: *hasResult = false; *hasResultType = false; break;
- case OpMemberDecorateStringGOOGLE: *hasResult = false; *hasResultType = false; break;
case OpVmeImageINTEL: *hasResult = true; *hasResultType = true; break;
case OpTypeVmeImageINTEL: *hasResult = true; *hasResultType = false; break;
case OpTypeAvcImePayloadINTEL: *hasResult = true; *hasResultType = false; break;
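
Most additions above are promotion aliases: when a KHR/EXT feature was folded into core SPIR-V, the header gained a suffix-less name with the same numeric value, so both spellings stay interchangeable. One practical consequence, assuming spirv.hpp is on the include path: a switch may list only one spelling of each pair, since duplicate case values are ill-formed.

    #include "spirv.hpp"

    // Both spellings denote one enumerator value.
    static_assert(spv::MemoryModelVulkan == spv::MemoryModelVulkanKHR,
                  "promoted alias shares the KHR value");
    static_assert(spv::StorageClassPhysicalStorageBuffer ==
                      spv::StorageClassPhysicalStorageBufferEXT,
                  "promoted alias shares the EXT value");
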
diff --git a/thirdparty/glslang/SPIRV/spvIR.h b/thirdparty/glslang/SPIRV/spvIR.h
index b3cd0b0613..cf6a71159a 100644..100755
--- a/thirdparty/glslang/SPIRV/spvIR.h
+++ b/thirdparty/glslang/SPIRV/spvIR.h
@@ -226,6 +226,36 @@ public:
return nullptr;
}
+ // Change this block into a canonical dead merge block. Delete instructions
+ // as necessary. A canonical dead merge block has only an OpLabel and an
+ // OpUnreachable.
+ void rewriteAsCanonicalUnreachableMerge() {
+ assert(localVariables.empty());
+ // Delete all instructions except for the label.
+ assert(instructions.size() > 0);
+ instructions.resize(1);
+ successors.clear();
+ Instruction* unreachable = new Instruction(OpUnreachable);
+ addInstruction(std::unique_ptr<Instruction>(unreachable));
+ }
+ // Change this block into a canonical dead continue target branching to the
+ // given header ID. Delete instructions as necessary. A canonical dead continue
+ // target has only an OpLabel and an unconditional branch back to the corresponding
+ // header.
+ void rewriteAsCanonicalUnreachableContinue(Block* header) {
+ assert(localVariables.empty());
+ // Delete all instructions except for the label.
+ assert(instructions.size() > 0);
+ instructions.resize(1);
+ successors.clear();
+ // Add OpBranch back to the header.
+ assert(header != nullptr);
+ Instruction* branch = new Instruction(OpBranch);
+ branch->addIdOperand(header->getId());
+ addInstruction(std::unique_ptr<Instruction>(branch));
+ successors.push_back(header);
+ }
+
bool isTerminated() const
{
switch (instructions.back()->getOpCode()) {
@@ -235,6 +265,7 @@ public:
case OpKill:
case OpReturn:
case OpReturnValue:
+ case OpUnreachable:
return true;
default:
return false;
@@ -268,10 +299,24 @@ protected:
bool unreachable;
};
+// The different reasons for reaching a block in the inReadableOrder traversal.
+enum ReachReason {
+ // Reachable from the entry block via transfers of control, i.e. branches.
+ ReachViaControlFlow = 0,
+ // A continue target that is not reachable via control flow.
+ ReachDeadContinue,
+ // A merge block that is not reachable via control flow.
+ ReachDeadMerge
+};
+
// Traverses the control-flow graph rooted at root in an order suited for
// readable code generation. Invokes callback at every node in the traversal
-// order.
-void inReadableOrder(Block* root, std::function<void(Block*)> callback);
+// order. The callback arguments are:
+// - the block,
+// - the reason we reached the block,
+// - if the reason was that block is an unreachable continue or unreachable merge block
+// then the last parameter is the corresponding header block.
+void inReadableOrder(Block* root, std::function<void(Block*, ReachReason, Block* header)> callback);
//
// SPIR-V IR Function.
@@ -321,7 +366,7 @@ public:
parameterInstructions[p]->dump(out);
// Blocks
- inReadableOrder(blocks[0], [&out](const Block* b) { b->dump(out); });
+ inReadableOrder(blocks[0], [&out](const Block* b, ReachReason, Block*) { b->dump(out); });
Instruction end(0, 0, OpFunctionEnd);
end.dump(out);
}
@@ -436,6 +481,6 @@ __inline void Block::addInstruction(std::unique_ptr<Instruction> inst)
parent.getParent().mapInstruction(raw_instruction);
}
-}; // end spv namespace
+} // end spv namespace
#endif // spvIR_H
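
The widened inReadableOrder callback is what lets postProcessCFG above tell apart the three ways a block can be handed to it. A simplified, self-contained model of that contract (a toy Block with explicit merge/continue fields, not the glslang implementation):

    #include <functional>
    #include <set>
    #include <utility>
    #include <vector>

    enum ReachReason { ReachViaControlFlow = 0, ReachDeadContinue, ReachDeadMerge };

    struct Block {
        std::vector<Block*> successors;   // targets of branches
        Block* mergeTarget = nullptr;     // from OpSelectionMerge/OpLoopMerge
        Block* continueTarget = nullptr;  // from OpLoopMerge
    };

    // Blocks reachable by branches are visited first; merge/continue targets
    // never reached that way are reported afterwards with their header block.
    void inReadableOrderToy(Block* root,
                            std::function<void(Block*, ReachReason, Block*)> callback)
    {
        std::set<Block*> visited;
        std::vector<std::pair<Block*, Block*>> deadMerges, deadContinues;
        std::function<void(Block*)> dfs = [&](Block* b) {
            if (!visited.insert(b).second)
                return;
            callback(b, ReachViaControlFlow, nullptr);
            for (Block* s : b->successors)
                dfs(s);
            if (b->mergeTarget && !visited.count(b->mergeTarget))
                deadMerges.push_back({b->mergeTarget, b});
            if (b->continueTarget && !visited.count(b->continueTarget))
                deadContinues.push_back({b->continueTarget, b});
        };
        dfs(root);
        for (auto& m : deadMerges)  // re-check: a later branch may have reached it
            if (visited.insert(m.first).second)
                callback(m.first, ReachDeadMerge, m.second);
        for (auto& c : deadContinues)
            if (visited.insert(c.first).second)
                callback(c.first, ReachDeadContinue, c.second);
    }

postProcessCFG then rewrites each reported dead merge to an OpLabel plus OpUnreachable, and each dead continue to an OpLabel plus an OpBranch back to its header, which is exactly the shape the two rewriteAsCanonicalUnreachable* methods above produce.
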
diff --git a/thirdparty/glslang/glslang/Include/BaseTypes.h b/thirdparty/glslang/glslang/Include/BaseTypes.h
index 1827c49653..6d4b4ff8e3 100644
--- a/thirdparty/glslang/glslang/Include/BaseTypes.h
+++ b/thirdparty/glslang/glslang/Include/BaseTypes.h
@@ -61,11 +61,7 @@ enum TBasicType {
EbtSampler,
EbtStruct,
EbtBlock,
-
-#ifdef NV_EXTENSIONS
EbtAccStructNV,
-#endif
-
EbtReference,
// HLSL types that live only temporarily.
@@ -94,13 +90,11 @@ enum TStorageQualifier {
EvqBuffer, // read/write, shared with app
EvqShared, // compute shader's read/write 'shared' qualifier
-#ifdef NV_EXTENSIONS
EvqPayloadNV,
EvqPayloadInNV,
EvqHitAttrNV,
EvqCallableDataNV,
EvqCallableDataInNV,
-#endif
// parameters
EvqIn, // also, for 'in' in the grammar before we know if it's a pipeline input or an 'in' parameter
@@ -221,7 +215,6 @@ enum TBuiltInVariable {
EbvSampleMask,
EbvHelperInvocation,
-#ifdef AMD_EXTENSIONS
EbvBaryCoordNoPersp,
EbvBaryCoordNoPerspCentroid,
EbvBaryCoordNoPerspSample,
@@ -229,7 +222,6 @@ enum TBuiltInVariable {
EbvBaryCoordSmoothCentroid,
EbvBaryCoordSmoothSample,
EbvBaryCoordPullModel,
-#endif
EbvViewIndex,
EbvDeviceIndex,
@@ -237,7 +229,6 @@ enum TBuiltInVariable {
EbvFragSizeEXT,
EbvFragInvocationCountEXT,
-#ifdef NV_EXTENSIONS
EbvViewportMaskNV,
EbvSecondaryPositionNV,
EbvSecondaryViewportMaskNV,
@@ -246,7 +237,7 @@ enum TBuiltInVariable {
EbvFragFullyCoveredNV,
EbvFragmentSizeNV,
EbvInvocationsPerPixelNV,
- // raytracing
+ // ray tracing
EbvLaunchIdNV,
EbvLaunchSizeNV,
EbvInstanceCustomIndexNV,
@@ -261,8 +252,10 @@ enum TBuiltInVariable {
EbvObjectToWorldNV,
EbvWorldToObjectNV,
EbvIncomingRayFlagsNV,
+ // barycentrics
EbvBaryCoordNV,
EbvBaryCoordNoPerspNV,
+ // mesh shaders
EbvTaskCountNV,
EbvPrimitiveCountNV,
EbvPrimitiveIndicesNV,
@@ -271,7 +264,12 @@ enum TBuiltInVariable {
EbvLayerPerViewNV,
EbvMeshViewCountNV,
EbvMeshViewIndicesNV,
-#endif
+
+ // sm builtins
+ EbvWarpsPerSM,
+ EbvSMCount,
+ EbvWarpID,
+ EbvSMID,
// HLSL built-ins that live only temporarily, until they get remapped
// to one of the above.
@@ -291,6 +289,19 @@ enum TBuiltInVariable {
EbvLast
};
+// In this enum, order matters; users can assume higher precision is a bigger value
+// and EpqNone is 0.
+enum TPrecisionQualifier {
+ EpqNone = 0,
+ EpqLow,
+ EpqMedium,
+ EpqHigh
+};
+
+#ifdef GLSLANG_WEB
+__inline const char* GetStorageQualifierString(TStorageQualifier q) { return ""; }
+__inline const char* GetPrecisionQualifierString(TPrecisionQualifier p) { return ""; }
+#else
// These will show up in error messages
__inline const char* GetStorageQualifierString(TStorageQualifier q)
{
@@ -317,13 +328,11 @@ __inline const char* GetStorageQualifierString(TStorageQualifier q)
case EvqPointCoord: return "gl_PointCoord"; break;
case EvqFragColor: return "fragColor"; break;
case EvqFragDepth: return "gl_FragDepth"; break;
-#ifdef NV_EXTENSIONS
case EvqPayloadNV: return "rayPayloadNV"; break;
case EvqPayloadInNV: return "rayPayloadInNV"; break;
case EvqHitAttrNV: return "hitAttributeNV"; break;
case EvqCallableDataNV: return "callableDataNV"; break;
case EvqCallableDataInNV: return "callableDataInNV"; break;
-#endif
default: return "unknown qualifier";
}
}
@@ -338,6 +347,8 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvLocalInvocationId: return "LocalInvocationID";
case EbvGlobalInvocationId: return "GlobalInvocationID";
case EbvLocalInvocationIndex: return "LocalInvocationIndex";
+ case EbvNumSubgroups: return "NumSubgroups";
+ case EbvSubgroupID: return "SubgroupID";
case EbvSubGroupSize: return "SubGroupSize";
case EbvSubGroupInvocation: return "SubGroupInvocation";
case EbvSubGroupEqMask: return "SubGroupEqMask";
@@ -345,6 +356,13 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvSubGroupGtMask: return "SubGroupGtMask";
case EbvSubGroupLeMask: return "SubGroupLeMask";
case EbvSubGroupLtMask: return "SubGroupLtMask";
+ case EbvSubgroupSize2: return "SubgroupSize";
+ case EbvSubgroupInvocation2: return "SubgroupInvocationID";
+ case EbvSubgroupEqMask2: return "SubgroupEqMask";
+ case EbvSubgroupGeMask2: return "SubgroupGeMask";
+ case EbvSubgroupGtMask2: return "SubgroupGtMask";
+ case EbvSubgroupLeMask2: return "SubgroupLeMask";
+ case EbvSubgroupLtMask2: return "SubgroupLtMask";
case EbvVertexId: return "VertexId";
case EbvInstanceId: return "InstanceId";
case EbvVertexIndex: return "VertexIndex";
@@ -396,7 +414,6 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvSampleMask: return "SampleMaskIn";
case EbvHelperInvocation: return "HelperInvocation";
-#ifdef AMD_EXTENSIONS
case EbvBaryCoordNoPersp: return "BaryCoordNoPersp";
case EbvBaryCoordNoPerspCentroid: return "BaryCoordNoPerspCentroid";
case EbvBaryCoordNoPerspSample: return "BaryCoordNoPerspSample";
@@ -404,7 +421,6 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvBaryCoordSmoothCentroid: return "BaryCoordSmoothCentroid";
case EbvBaryCoordSmoothSample: return "BaryCoordSmoothSample";
case EbvBaryCoordPullModel: return "BaryCoordPullModel";
-#endif
case EbvViewIndex: return "ViewIndex";
case EbvDeviceIndex: return "DeviceIndex";
@@ -412,7 +428,6 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvFragSizeEXT: return "FragSizeEXT";
case EbvFragInvocationCountEXT: return "FragInvocationCountEXT";
-#ifdef NV_EXTENSIONS
case EbvViewportMaskNV: return "ViewportMaskNV";
case EbvSecondaryPositionNV: return "SecondaryPositionNV";
case EbvSecondaryViewportMaskNV: return "SecondaryViewportMaskNV";
@@ -438,6 +453,7 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvBaryCoordNV: return "BaryCoordNV";
case EbvBaryCoordNoPerspNV: return "BaryCoordNoPerspNV";
+
case EbvTaskCountNV: return "TaskCountNV";
case EbvPrimitiveCountNV: return "PrimitiveCountNV";
case EbvPrimitiveIndicesNV: return "PrimitiveIndicesNV";
@@ -446,20 +462,16 @@ __inline const char* GetBuiltInVariableString(TBuiltInVariable v)
case EbvLayerPerViewNV: return "LayerPerViewNV";
case EbvMeshViewCountNV: return "MeshViewCountNV";
case EbvMeshViewIndicesNV: return "MeshViewIndicesNV";
-#endif
+
+ case EbvWarpsPerSM: return "WarpsPerSMNV";
+ case EbvSMCount: return "SMCountNV";
+ case EbvWarpID: return "WarpIDNV";
+ case EbvSMID: return "SMIDNV";
+
default: return "unknown built-in variable";
}
}
-// In this enum, order matters; users can assume higher precision is a bigger value
-// and EpqNone is 0.
-enum TPrecisionQualifier {
- EpqNone = 0,
- EpqLow,
- EpqMedium,
- EpqHigh
-};
-
__inline const char* GetPrecisionQualifierString(TPrecisionQualifier p)
{
switch (p) {
@@ -470,6 +482,7 @@ __inline const char* GetPrecisionQualifierString(TPrecisionQualifier p)
default: return "unknown precision qualifier";
}
}
+#endif
__inline bool isTypeSignedInt(TBasicType type)
{
@@ -514,7 +527,8 @@ __inline bool isTypeFloat(TBasicType type)
}
}
-__inline int getTypeRank(TBasicType type) {
+__inline int getTypeRank(TBasicType type)
+{
int res = -1;
switch(type) {
case EbtInt8:
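
The relocated TPrecisionQualifier enum keeps its documented guarantee: declaration order is part of the contract, so precision promotion can be a plain integer comparison. A small sketch of code relying on that guarantee (maxPrecision is illustrative, not a glslang helper):

    #include <algorithm>

    // Mirrors the enum above; the ordering is a documented invariant.
    enum TPrecisionQualifier { EpqNone = 0, EpqLow, EpqMedium, EpqHigh };

    TPrecisionQualifier maxPrecision(TPrecisionQualifier a, TPrecisionQualifier b)
    {
        return std::max(a, b);  // valid because higher precision is a bigger value
    }

    static_assert(EpqNone == 0 && EpqLow < EpqMedium && EpqMedium < EpqHigh,
                  "promotion relies on this ordering");
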
diff --git a/thirdparty/glslang/glslang/Include/Common.h b/thirdparty/glslang/glslang/Include/Common.h
index a82c3af4dc..2c511bc1c5 100644
--- a/thirdparty/glslang/glslang/Include/Common.h
+++ b/thirdparty/glslang/glslang/Include/Common.h
@@ -51,7 +51,7 @@ std::string to_string(const T& val) {
#endif
// -- GODOT start --
-#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) /* || defined MINGW_HAS_SECURE_API*/
+#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) /* || defined MINGW_HAS_SECURE_API */
// -- GODOT end --
#include <basetsd.h>
#ifndef snprintf
diff --git a/thirdparty/glslang/glslang/Include/ConstantUnion.h b/thirdparty/glslang/glslang/Include/ConstantUnion.h
index 3e93340151..76b2d9c08b 100644
--- a/thirdparty/glslang/glslang/Include/ConstantUnion.h
+++ b/thirdparty/glslang/glslang/Include/ConstantUnion.h
@@ -213,56 +213,59 @@ public:
return false;
switch (type) {
- case EbtInt16:
- if (constant.i16Const == i16Const)
+ case EbtInt:
+ if (constant.iConst == iConst)
return true;
break;
- case EbtUint16:
- if (constant.u16Const == u16Const)
+ case EbtUint:
+ if (constant.uConst == uConst)
return true;
break;
- case EbtInt8:
- if (constant.i8Const == i8Const)
+ case EbtBool:
+ if (constant.bConst == bConst)
return true;
break;
- case EbtUint8:
- if (constant.u8Const == u8Const)
+ case EbtDouble:
+ if (constant.dConst == dConst)
return true;
break;
- case EbtInt:
- if (constant.iConst == iConst)
+
+#ifndef GLSLANG_WEB
+ case EbtInt16:
+ if (constant.i16Const == i16Const)
return true;
break;
- case EbtUint:
- if (constant.uConst == uConst)
+ case EbtUint16:
+ if (constant.u16Const == u16Const)
return true;
break;
- case EbtInt64:
- if (constant.i64Const == i64Const)
+ case EbtInt8:
+ if (constant.i8Const == i8Const)
return true;
break;
- case EbtUint64:
- if (constant.u64Const == u64Const)
+ case EbtUint8:
+ if (constant.u8Const == u8Const)
return true;
break;
- case EbtDouble:
- if (constant.dConst == dConst)
+ case EbtInt64:
+ if (constant.i64Const == i64Const)
return true;
break;
- case EbtBool:
- if (constant.bConst == bConst)
+ case EbtUint64:
+ if (constant.u64Const == u64Const)
return true;
break;
+#endif
default:
assert(false && "Default missing");
}
@@ -329,6 +332,22 @@ public:
{
assert(type == constant.type);
switch (type) {
+ case EbtInt:
+ if (iConst > constant.iConst)
+ return true;
+
+ return false;
+ case EbtUint:
+ if (uConst > constant.uConst)
+ return true;
+
+ return false;
+ case EbtDouble:
+ if (dConst > constant.dConst)
+ return true;
+
+ return false;
+#ifndef GLSLANG_WEB
case EbtInt8:
if (i8Const > constant.i8Const)
return true;
@@ -349,16 +368,6 @@ public:
return true;
return false;
- case EbtInt:
- if (iConst > constant.iConst)
- return true;
-
- return false;
- case EbtUint:
- if (uConst > constant.uConst)
- return true;
-
- return false;
case EbtInt64:
if (i64Const > constant.i64Const)
return true;
@@ -369,11 +378,7 @@ public:
return true;
return false;
- case EbtDouble:
- if (dConst > constant.dConst)
- return true;
-
- return false;
+#endif
default:
assert(false && "Default missing");
return false;
@@ -384,6 +389,7 @@ public:
{
assert(type == constant.type);
switch (type) {
+#ifndef GLSLANG_WEB
case EbtInt8:
if (i8Const < constant.i8Const)
return true;
@@ -394,7 +400,7 @@ public:
return true;
return false;
- case EbtInt16:
+ case EbtInt16:
if (i16Const < constant.i16Const)
return true;
@@ -402,17 +408,6 @@ public:
case EbtUint16:
if (u16Const < constant.u16Const)
return true;
-
- return false;
- case EbtInt:
- if (iConst < constant.iConst)
- return true;
-
- return false;
- case EbtUint:
- if (uConst < constant.uConst)
- return true;
-
return false;
case EbtInt64:
if (i64Const < constant.i64Const)
@@ -424,11 +419,22 @@ public:
return true;
return false;
+#endif
case EbtDouble:
if (dConst < constant.dConst)
return true;
return false;
+ case EbtInt:
+ if (iConst < constant.iConst)
+ return true;
+
+ return false;
+ case EbtUint:
+ if (uConst < constant.uConst)
+ return true;
+
+ return false;
default:
assert(false && "Default missing");
return false;
@@ -440,15 +446,17 @@ public:
TConstUnion returnValue;
assert(type == constant.type);
switch (type) {
+ case EbtInt: returnValue.setIConst(iConst + constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst + constant.uConst); break;
+ case EbtDouble: returnValue.setDConst(dConst + constant.dConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setI8Const(i8Const + constant.i8Const); break;
case EbtInt16: returnValue.setI16Const(i16Const + constant.i16Const); break;
- case EbtInt: returnValue.setIConst(iConst + constant.iConst); break;
case EbtInt64: returnValue.setI64Const(i64Const + constant.i64Const); break;
case EbtUint8: returnValue.setU8Const(u8Const + constant.u8Const); break;
case EbtUint16: returnValue.setU16Const(u16Const + constant.u16Const); break;
- case EbtUint: returnValue.setUConst(uConst + constant.uConst); break;
case EbtUint64: returnValue.setU64Const(u64Const + constant.u64Const); break;
- case EbtDouble: returnValue.setDConst(dConst + constant.dConst); break;
+#endif
default: assert(false && "Default missing");
}
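
Every hunk in this file follows the same shape: the always-available scalar types (int, uint, double, bool) move to the front of each switch, and the 8/16/64-bit cases sit behind #ifndef GLSLANG_WEB so the web build compiles them out. A toy tagged union showing the pattern for operator+ (simplified stand-ins, not glslang's TConstUnion):

    #include <cassert>

    enum TBasicType { EbtInt, EbtUint, EbtDouble /* sized types omitted */ };

    // Toy tagged union in the same shape as TConstUnion above.
    struct ConstUnion {
        TBasicType type;
        union { int iConst; unsigned uConst; double dConst; };
    };

    ConstUnion add(const ConstUnion& a, const ConstUnion& b)
    {
        assert(a.type == b.type);
        ConstUnion r;
        r.type = a.type;
        switch (a.type) {
        case EbtInt:    r.iConst = a.iConst + b.iConst; break;
        case EbtUint:   r.uConst = a.uConst + b.uConst; break;
        case EbtDouble: r.dConst = a.dConst + b.dConst; break;
    #ifndef GLSLANG_WEB
        // case EbtInt8 ... EbtUint64 would go here, compiled out of the web build.
    #endif
        default: assert(false && "Default missing");
        }
        return r;
    }
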
@@ -460,15 +468,17 @@ public:
TConstUnion returnValue;
assert(type == constant.type);
switch (type) {
+ case EbtInt: returnValue.setIConst(iConst - constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst - constant.uConst); break;
+ case EbtDouble: returnValue.setDConst(dConst - constant.dConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setI8Const(i8Const - constant.i8Const); break;
case EbtInt16: returnValue.setI16Const(i16Const - constant.i16Const); break;
- case EbtInt: returnValue.setIConst(iConst - constant.iConst); break;
case EbtInt64: returnValue.setI64Const(i64Const - constant.i64Const); break;
case EbtUint8: returnValue.setU8Const(u8Const - constant.u8Const); break;
case EbtUint16: returnValue.setU16Const(u16Const - constant.u16Const); break;
- case EbtUint: returnValue.setUConst(uConst - constant.uConst); break;
case EbtUint64: returnValue.setU64Const(u64Const - constant.u64Const); break;
- case EbtDouble: returnValue.setDConst(dConst - constant.dConst); break;
+#endif
default: assert(false && "Default missing");
}
@@ -480,15 +490,17 @@ public:
TConstUnion returnValue;
assert(type == constant.type);
switch (type) {
+ case EbtInt: returnValue.setIConst(iConst * constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst * constant.uConst); break;
+ case EbtDouble: returnValue.setDConst(dConst * constant.dConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setI8Const(i8Const * constant.i8Const); break;
case EbtInt16: returnValue.setI16Const(i16Const * constant.i16Const); break;
- case EbtInt: returnValue.setIConst(iConst * constant.iConst); break;
case EbtInt64: returnValue.setI64Const(i64Const * constant.i64Const); break;
case EbtUint8: returnValue.setU8Const(u8Const * constant.u8Const); break;
case EbtUint16: returnValue.setU16Const(u16Const * constant.u16Const); break;
- case EbtUint: returnValue.setUConst(uConst * constant.uConst); break;
case EbtUint64: returnValue.setU64Const(u64Const * constant.u64Const); break;
- case EbtDouble: returnValue.setDConst(dConst * constant.dConst); break;
+#endif
default: assert(false && "Default missing");
}
@@ -500,14 +512,16 @@ public:
TConstUnion returnValue;
assert(type == constant.type);
switch (type) {
+ case EbtInt: returnValue.setIConst(iConst % constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst % constant.uConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setI8Const(i8Const % constant.i8Const); break;
case EbtInt16: returnValue.setI16Const(i16Const % constant.i16Const); break;
- case EbtInt: returnValue.setIConst(iConst % constant.iConst); break;
case EbtInt64: returnValue.setI64Const(i64Const % constant.i64Const); break;
case EbtUint8: returnValue.setU8Const(u8Const % constant.u8Const); break;
case EbtUint16: returnValue.setU16Const(u16Const % constant.u16Const); break;
- case EbtUint: returnValue.setUConst(uConst % constant.uConst); break;
case EbtUint64: returnValue.setU64Const(u64Const % constant.u64Const); break;
+#endif
default: assert(false && "Default missing");
}
@@ -518,6 +532,7 @@ public:
{
TConstUnion returnValue;
switch (type) {
+#ifndef GLSLANG_WEB
case EbtInt8:
switch (constant.type) {
case EbtInt8: returnValue.setI8Const(i8Const >> constant.i8Const); break;
@@ -570,32 +585,38 @@ public:
default: assert(false && "Default missing");
}
break;
+#endif
case EbtInt:
switch (constant.type) {
+ case EbtInt: returnValue.setIConst(iConst >> constant.iConst); break;
+ case EbtUint: returnValue.setIConst(iConst >> constant.uConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setIConst(iConst >> constant.i8Const); break;
case EbtUint8: returnValue.setIConst(iConst >> constant.u8Const); break;
case EbtInt16: returnValue.setIConst(iConst >> constant.i16Const); break;
case EbtUint16: returnValue.setIConst(iConst >> constant.u16Const); break;
- case EbtInt: returnValue.setIConst(iConst >> constant.iConst); break;
- case EbtUint: returnValue.setIConst(iConst >> constant.uConst); break;
case EbtInt64: returnValue.setIConst(iConst >> constant.i64Const); break;
case EbtUint64: returnValue.setIConst(iConst >> constant.u64Const); break;
+#endif
default: assert(false && "Default missing");
}
break;
case EbtUint:
switch (constant.type) {
+ case EbtInt: returnValue.setUConst(uConst >> constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst >> constant.uConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setUConst(uConst >> constant.i8Const); break;
case EbtUint8: returnValue.setUConst(uConst >> constant.u8Const); break;
case EbtInt16: returnValue.setUConst(uConst >> constant.i16Const); break;
case EbtUint16: returnValue.setUConst(uConst >> constant.u16Const); break;
- case EbtInt: returnValue.setUConst(uConst >> constant.iConst); break;
- case EbtUint: returnValue.setUConst(uConst >> constant.uConst); break;
case EbtInt64: returnValue.setUConst(uConst >> constant.i64Const); break;
case EbtUint64: returnValue.setUConst(uConst >> constant.u64Const); break;
+#endif
default: assert(false && "Default missing");
}
break;
+#ifndef GLSLANG_WEB
case EbtInt64:
switch (constant.type) {
case EbtInt8: returnValue.setI64Const(i64Const >> constant.i8Const); break;
@@ -622,6 +643,7 @@ public:
default: assert(false && "Default missing");
}
break;
+#endif
default: assert(false && "Default missing");
}
@@ -632,6 +654,7 @@ public:
{
TConstUnion returnValue;
switch (type) {
+#ifndef GLSLANG_WEB
case EbtInt8:
switch (constant.type) {
case EbtInt8: returnValue.setI8Const(i8Const << constant.i8Const); break;
@@ -684,32 +707,6 @@ public:
default: assert(false && "Default missing");
}
break;
- case EbtInt:
- switch (constant.type) {
- case EbtInt8: returnValue.setIConst(iConst << constant.i8Const); break;
- case EbtUint8: returnValue.setIConst(iConst << constant.u8Const); break;
- case EbtInt16: returnValue.setIConst(iConst << constant.i16Const); break;
- case EbtUint16: returnValue.setIConst(iConst << constant.u16Const); break;
- case EbtInt: returnValue.setIConst(iConst << constant.iConst); break;
- case EbtUint: returnValue.setIConst(iConst << constant.uConst); break;
- case EbtInt64: returnValue.setIConst(iConst << constant.i64Const); break;
- case EbtUint64: returnValue.setIConst(iConst << constant.u64Const); break;
- default: assert(false && "Default missing");
- }
- break;
- case EbtUint:
- switch (constant.type) {
- case EbtInt8: returnValue.setUConst(uConst << constant.i8Const); break;
- case EbtUint8: returnValue.setUConst(uConst << constant.u8Const); break;
- case EbtInt16: returnValue.setUConst(uConst << constant.i16Const); break;
- case EbtUint16: returnValue.setUConst(uConst << constant.u16Const); break;
- case EbtInt: returnValue.setUConst(uConst << constant.iConst); break;
- case EbtUint: returnValue.setUConst(uConst << constant.uConst); break;
- case EbtInt64: returnValue.setUConst(uConst << constant.i64Const); break;
- case EbtUint64: returnValue.setUConst(uConst << constant.u64Const); break;
- default: assert(false && "Default missing");
- }
- break;
case EbtInt64:
switch (constant.type) {
case EbtInt8: returnValue.setI64Const(i64Const << constant.i8Const); break;
@@ -736,6 +733,37 @@ public:
default: assert(false && "Default missing");
}
break;
+#endif
+ case EbtInt:
+ switch (constant.type) {
+ case EbtInt: returnValue.setIConst(iConst << constant.iConst); break;
+ case EbtUint: returnValue.setIConst(iConst << constant.uConst); break;
+#ifndef GLSLANG_WEB
+ case EbtInt8: returnValue.setIConst(iConst << constant.i8Const); break;
+ case EbtUint8: returnValue.setIConst(iConst << constant.u8Const); break;
+ case EbtInt16: returnValue.setIConst(iConst << constant.i16Const); break;
+ case EbtUint16: returnValue.setIConst(iConst << constant.u16Const); break;
+ case EbtInt64: returnValue.setIConst(iConst << constant.i64Const); break;
+ case EbtUint64: returnValue.setIConst(iConst << constant.u64Const); break;
+#endif
+ default: assert(false && "Default missing");
+ }
+ break;
+ case EbtUint:
+ switch (constant.type) {
+ case EbtInt: returnValue.setUConst(uConst << constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst << constant.uConst); break;
+#ifndef GLSLANG_WEB
+ case EbtInt8: returnValue.setUConst(uConst << constant.i8Const); break;
+ case EbtUint8: returnValue.setUConst(uConst << constant.u8Const); break;
+ case EbtInt16: returnValue.setUConst(uConst << constant.i16Const); break;
+ case EbtUint16: returnValue.setUConst(uConst << constant.u16Const); break;
+ case EbtInt64: returnValue.setUConst(uConst << constant.i64Const); break;
+ case EbtUint64: returnValue.setUConst(uConst << constant.u64Const); break;
+#endif
+ default: assert(false && "Default missing");
+ }
+ break;
default: assert(false && "Default missing");
}
@@ -747,14 +775,16 @@ public:
TConstUnion returnValue;
assert(type == constant.type);
switch (type) {
+ case EbtInt: returnValue.setIConst(iConst & constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst & constant.uConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setI8Const(i8Const & constant.i8Const); break;
case EbtUint8: returnValue.setU8Const(u8Const & constant.u8Const); break;
case EbtInt16: returnValue.setI16Const(i16Const & constant.i16Const); break;
case EbtUint16: returnValue.setU16Const(u16Const & constant.u16Const); break;
- case EbtInt: returnValue.setIConst(iConst & constant.iConst); break;
- case EbtUint: returnValue.setUConst(uConst & constant.uConst); break;
case EbtInt64: returnValue.setI64Const(i64Const & constant.i64Const); break;
case EbtUint64: returnValue.setU64Const(u64Const & constant.u64Const); break;
+#endif
default: assert(false && "Default missing");
}
@@ -766,14 +796,16 @@ public:
TConstUnion returnValue;
assert(type == constant.type);
switch (type) {
+ case EbtInt: returnValue.setIConst(iConst | constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst | constant.uConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setI8Const(i8Const | constant.i8Const); break;
case EbtUint8: returnValue.setU8Const(u8Const | constant.u8Const); break;
case EbtInt16: returnValue.setI16Const(i16Const | constant.i16Const); break;
case EbtUint16: returnValue.setU16Const(u16Const | constant.u16Const); break;
- case EbtInt: returnValue.setIConst(iConst | constant.iConst); break;
- case EbtUint: returnValue.setUConst(uConst | constant.uConst); break;
case EbtInt64: returnValue.setI64Const(i64Const | constant.i64Const); break;
case EbtUint64: returnValue.setU64Const(u64Const | constant.u64Const); break;
+#endif
default: assert(false && "Default missing");
}
@@ -785,14 +817,16 @@ public:
TConstUnion returnValue;
assert(type == constant.type);
switch (type) {
+ case EbtInt: returnValue.setIConst(iConst ^ constant.iConst); break;
+ case EbtUint: returnValue.setUConst(uConst ^ constant.uConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setI8Const(i8Const ^ constant.i8Const); break;
case EbtUint8: returnValue.setU8Const(u8Const ^ constant.u8Const); break;
case EbtInt16: returnValue.setI16Const(i16Const ^ constant.i16Const); break;
case EbtUint16: returnValue.setU16Const(u16Const ^ constant.u16Const); break;
- case EbtInt: returnValue.setIConst(iConst ^ constant.iConst); break;
- case EbtUint: returnValue.setUConst(uConst ^ constant.uConst); break;
case EbtInt64: returnValue.setI64Const(i64Const ^ constant.i64Const); break;
case EbtUint64: returnValue.setU64Const(u64Const ^ constant.u64Const); break;
+#endif
default: assert(false && "Default missing");
}
@@ -803,14 +837,16 @@ public:
{
TConstUnion returnValue;
switch (type) {
+ case EbtInt: returnValue.setIConst(~iConst); break;
+ case EbtUint: returnValue.setUConst(~uConst); break;
+#ifndef GLSLANG_WEB
case EbtInt8: returnValue.setI8Const(~i8Const); break;
case EbtUint8: returnValue.setU8Const(~u8Const); break;
case EbtInt16: returnValue.setI16Const(~i16Const); break;
case EbtUint16: returnValue.setU16Const(~u16Const); break;
- case EbtInt: returnValue.setIConst(~iConst); break;
- case EbtUint: returnValue.setUConst(~uConst); break;
case EbtInt64: returnValue.setI64Const(~i64Const); break;
case EbtUint64: returnValue.setU64Const(~u64Const); break;
+#endif
default: assert(false && "Default missing");
}
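The hunks above apply one mechanical transformation across TConstUnion's operators: the 32-bit cases (plus EbtDouble for the arithmetic operators) are hoisted ahead of the sized-type cases, and the latter are wrapped so the web build compiles them out. A minimal sketch of the resulting shape, condensed from the switches above (illustrative, not a new operator):

TConstUnion operator-(const TConstUnion& constant) const
{
    TConstUnion returnValue;
    assert(type == constant.type);
    switch (type) {
    // always-present cases first: the web build needs only these
    case EbtInt:  returnValue.setIConst(iConst - constant.iConst); break;
    case EbtUint: returnValue.setUConst(uConst - constant.uConst); break;
#ifndef GLSLANG_WEB
    // 8/16/64-bit cases compile away under GLSLANG_WEB
    case EbtInt64: returnValue.setI64Const(i64Const - constant.i64Const); break;
    // ... remaining sized-type cases ...
#endif
    default: assert(false && "Default missing");
    }
    return returnValue;
}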
diff --git a/thirdparty/glslang/glslang/Include/PoolAlloc.h b/thirdparty/glslang/glslang/Include/PoolAlloc.h
index 0e237a6a2c..b8eccb8832 100644
--- a/thirdparty/glslang/glslang/Include/PoolAlloc.h
+++ b/thirdparty/glslang/glslang/Include/PoolAlloc.h
@@ -304,7 +304,6 @@ public:
size_type max_size() const { return static_cast<size_type>(-1) / sizeof(T); }
size_type max_size(int size) const { return static_cast<size_type>(-1) / size; }
- void setAllocator(TPoolAllocator* a) { allocator = *a; }
TPoolAllocator& getAllocator() const { return allocator; }
protected:
diff --git a/thirdparty/glslang/glslang/Include/Types.h b/thirdparty/glslang/glslang/Include/Types.h
index 90341dcb27..3572099e3d 100644
--- a/thirdparty/glslang/glslang/Include/Types.h
+++ b/thirdparty/glslang/glslang/Include/Types.h
@@ -80,31 +80,59 @@ struct TSampler { // misnomer now; includes images, textures without sampler,
bool image : 1; // image, combined should be false
bool combined : 1; // true means texture is combined with a sampler, false means texture with no sampler
bool sampler : 1; // true means a pure sampler, other fields should be clear()
- bool external : 1; // GL_OES_EGL_image_external
- bool yuv : 1; // GL_EXT_YUV_target
- unsigned int vectorSize : 3; // vector return type size.
+#ifdef GLSLANG_WEB
+ bool is1D() const { return false; }
+ bool isBuffer() const { return false; }
+ bool isRect() const { return false; }
+ bool isSubpass() const { return false; }
+ bool isCombined() const { return true; }
+ bool isImage() const { return false; }
+ bool isImageClass() const { return false; }
+ bool isMultiSample() const { return false; }
+ bool isExternal() const { return false; }
+ void setExternal(bool e) { }
+ bool isYuv() const { return false; }
+#else
+ unsigned int vectorSize : 3; // vector return type size.
// Some languages support structures as sample results. Storing the whole structure in the
// TSampler is too large, so there is an index to a separate table.
static const unsigned structReturnIndexBits = 4; // number of index bits to use.
static const unsigned structReturnSlots = (1<<structReturnIndexBits)-1; // number of valid values
static const unsigned noReturnStruct = structReturnSlots; // value if no return struct type.
-
// Index into a language specific table of texture return structures.
unsigned int structReturnIndex : structReturnIndexBits;
- // Encapsulate getting members' vector sizes packed into the vectorSize bitfield.
+ bool external : 1; // GL_OES_EGL_image_external
+ bool yuv : 1; // GL_EXT_YUV_target
+
+#ifdef ENABLE_HLSL
unsigned int getVectorSize() const { return vectorSize; }
+ void clearReturnStruct() { structReturnIndex = noReturnStruct; }
+ bool hasReturnStruct() const { return structReturnIndex != noReturnStruct; }
+ unsigned getStructReturnIndex() const { return structReturnIndex; }
+#endif
- bool isImage() const { return image && dim != EsdSubpass; }
+ bool is1D() const { return dim == Esd1D; }
+ bool isBuffer() const { return dim == EsdBuffer; }
+ bool isRect() const { return dim == EsdRect; }
bool isSubpass() const { return dim == EsdSubpass; }
bool isCombined() const { return combined; }
- bool isPureSampler() const { return sampler; }
+ bool isImage() const { return image && !isSubpass(); }
+ bool isImageClass() const { return image; }
+ bool isMultiSample() const { return ms; }
+ bool isExternal() const { return external; }
+ void setExternal(bool e) { external = e; }
+ bool isYuv() const { return yuv; }
+#endif
bool isTexture() const { return !sampler && !image; }
+ bool isPureSampler() const { return sampler; }
+
+ void setCombined(bool c) { combined = c; }
+ void setBasicType(TBasicType t) { type = t; }
+ TBasicType getBasicType() const { return type; }
bool isShadow() const { return shadow; }
bool isArrayed() const { return arrayed; }
- bool isMultiSample() const { return ms; }
- bool hasReturnStruct() const { return structReturnIndex != noReturnStruct; }
void clear()
{
@@ -116,12 +144,16 @@ struct TSampler { // misnomer now; includes images, textures without sampler,
image = false;
combined = false;
sampler = false;
+#ifndef GLSLANG_WEB
external = false;
yuv = false;
- structReturnIndex = noReturnStruct;
+#endif
+#ifdef ENABLE_HLSL
+ clearReturnStruct();
// by default, returns a single vec4;
vectorSize = 4;
+#endif
}
// make a combined sampler and texture
@@ -159,6 +191,15 @@ struct TSampler { // misnomer now; includes images, textures without sampler,
ms = m;
}
+ // make a pure sampler, no texture, no image, nothing combined, the 'sampler' keyword
+ void setPureSampler(bool s)
+ {
+ clear();
+ sampler = true;
+ shadow = s;
+ }
+
+#ifndef GLSLANG_WEB
// make a subpass input attachment
void setSubpass(TBasicType t, bool m = false)
{
@@ -168,14 +209,7 @@ struct TSampler { // misnomer now; includes images, textures without sampler,
dim = EsdSubpass;
ms = m;
}
-
- // make a pure sampler, no texture, no image, nothing combined, the 'sampler' keyword
- void setPureSampler(bool s)
- {
- clear();
- sampler = true;
- shadow = s;
- }
+#endif
bool operator==(const TSampler& right) const
{
@@ -183,14 +217,17 @@ struct TSampler { // misnomer now; includes images, textures without sampler,
dim == right.dim &&
arrayed == right.arrayed &&
shadow == right.shadow &&
- ms == right.ms &&
- image == right.image &&
- combined == right.combined &&
- sampler == right.sampler &&
- external == right.external &&
- yuv == right.yuv &&
- vectorSize == right.vectorSize &&
- structReturnIndex == right.structReturnIndex;
+ isMultiSample() == right.isMultiSample() &&
+ isImageClass() == right.isImageClass() &&
+ isCombined() == right.isCombined() &&
+ isPureSampler() == right.isPureSampler() &&
+ isExternal() == right.isExternal() &&
+ isYuv() == right.isYuv()
+#ifdef ENABLE_HLSL
+ && getVectorSize() == right.getVectorSize() &&
+ getStructReturnIndex() == right.getStructReturnIndex()
+#endif
+ ;
}
bool operator!=(const TSampler& right) const
@@ -202,54 +239,55 @@ struct TSampler { // misnomer now; includes images, textures without sampler,
{
TString s;
- if (sampler) {
+ if (isPureSampler()) {
s.append("sampler");
return s;
}
switch (type) {
- case EbtFloat: break;
-#ifdef AMD_EXTENSIONS
+ case EbtInt: s.append("i"); break;
+ case EbtUint: s.append("u"); break;
+#ifndef GLSLANG_WEB
case EbtFloat16: s.append("f16"); break;
-#endif
case EbtInt8: s.append("i8"); break;
case EbtUint8: s.append("u8"); break;
case EbtInt16: s.append("i16"); break;
case EbtUint16: s.append("u16"); break;
- case EbtInt: s.append("i"); break;
- case EbtUint: s.append("u"); break;
case EbtInt64: s.append("i64"); break;
case EbtUint64: s.append("u64"); break;
- default: break; // some compilers want this
+#endif
+ default: break;
}
- if (image) {
- if (dim == EsdSubpass)
+ if (isImageClass()) {
+ if (isSubpass())
s.append("subpass");
else
s.append("image");
- } else if (combined) {
+ } else if (isCombined()) {
s.append("sampler");
} else {
s.append("texture");
}
- if (external) {
+ if (isExternal()) {
s.append("ExternalOES");
return s;
}
- if (yuv) {
+ if (isYuv()) {
return "__" + s + "External2DY2YEXT";
}
switch (dim) {
- case Esd1D: s.append("1D"); break;
case Esd2D: s.append("2D"); break;
case Esd3D: s.append("3D"); break;
case EsdCube: s.append("Cube"); break;
+#ifndef GLSLANG_WEB
+ case Esd1D: s.append("1D"); break;
case EsdRect: s.append("2DRect"); break;
case EsdBuffer: s.append("Buffer"); break;
case EsdSubpass: s.append("Input"); break;
+#endif
default: break; // some compilers want this
}
- if (ms)
+ if (isMultiSample())
s.append("MS");
if (arrayed)
s.append("Array");
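The operator== and getString() bodies switch from reading bitfields directly to the accessors defined above. Since the GLSLANG_WEB accessors are constant-returning inlines, the guarded queries reduce to compile-time constants and the compiler can drop the corresponding branches from the web build. A sketch of the effect, using the members above:

#ifdef GLSLANG_WEB
    bool isMultiSample() const { return false; }   // constant on the web path
#else
    bool isMultiSample() const { return ms; }      // real bitfield elsewhere
#endif
    // ...
    if (isMultiSample())    // dead code under GLSLANG_WEB, foldable by the compiler
        s.append("MS");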
@@ -422,6 +460,18 @@ enum TBlendEquationShift {
EBlendCount
};
+enum TInterlockOrdering {
+ EioNone,
+ EioPixelInterlockOrdered,
+ EioPixelInterlockUnordered,
+ EioSampleInterlockOrdered,
+ EioSampleInterlockUnordered,
+ EioShadingRateInterlockOrdered,
+ EioShadingRateInterlockUnordered,
+
+ EioCount,
+};
+
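These orderings model the fragment-shader interlock layout qualifiers whose spellings appear in getInterlockOrderingString() further down. Assuming the GL_ARB_fragment_shader_interlock forms, a shader selecting EioPixelInterlockOrdered would read:

//   #extension GL_ARB_fragment_shader_interlock : require
//   layout(pixel_interlock_ordered) in;
//   void main() {
//       beginInvocationInterlockARB();   // see EOpBeginInvocationInterlock below
//       // race-free read-modify-write of per-pixel resources
//       endInvocationInterlockARB();     // see EOpEndInvocationInterlock
//   }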
class TQualifier {
public:
static const int layoutNotSet = -1;
@@ -430,9 +480,11 @@ public:
{
precision = EpqNone;
invariant = false;
- noContraction = false;
makeTemporary();
declaredBuiltIn = EbvNone;
+#ifndef GLSLANG_WEB
+ noContraction = false;
+#endif
}
// drop qualifiers that don't belong in a temporary variable
@@ -451,8 +503,10 @@ public:
void clearInterstage()
{
clearInterpolation();
+#ifndef GLSLANG_WEB
patch = false;
sample = false;
+#endif
}
void clearInterpolation()
@@ -460,11 +514,9 @@ public:
centroid = false;
smooth = false;
flat = false;
+#ifndef GLSLANG_WEB
nopersp = false;
-#ifdef AMD_EXTENSIONS
explicitInterp = false;
-#endif
-#ifdef NV_EXTENSIONS
pervertexNV = false;
perPrimitiveNV = false;
perViewNV = false;
@@ -474,6 +526,7 @@ public:
void clearMemory()
{
+#ifndef GLSLANG_WEB
coherent = false;
devicecoherent = false;
queuefamilycoherent = false;
@@ -484,57 +537,66 @@ public:
restrict = false;
readonly = false;
writeonly = false;
- }
-
- // Drop just the storage qualification, which perhaps should
- // never be done, as it is fundamentally inconsistent, but need to
- // explore what downstream consumers need.
- // E.g., in a dereference, it is an inconsistency between:
- // A) partially dereferenced resource is still in the storage class it started in
- // B) partially dereferenced resource is a new temporary object
- // If A, then nothing should change, if B, then everything should change, but this is half way.
- void makePartialTemporary()
- {
- storage = EvqTemporary;
- specConstant = false;
- nonUniform = false;
+#endif
}
const char* semanticName;
TStorageQualifier storage : 6;
- TBuiltInVariable builtIn : 8;
- TBuiltInVariable declaredBuiltIn : 8;
+ TBuiltInVariable builtIn : 9;
+ TBuiltInVariable declaredBuiltIn : 9;
+ static_assert(EbvLast < 256, "need to increase size of TBuiltInVariable bitfields!");
TPrecisionQualifier precision : 3;
bool invariant : 1; // require canonical treatment for cross-shader invariance
- bool noContraction: 1; // prevent contraction and reassociation, e.g., for 'precise' keyword, and expressions it affects
bool centroid : 1;
bool smooth : 1;
bool flat : 1;
+ // having a constant_id is not sufficient: expressions have no id, but are still specConstant
+ bool specConstant : 1;
+ bool nonUniform : 1;
+
+#ifdef GLSLANG_WEB
+ bool isWriteOnly() const { return false; }
+ bool isReadOnly() const { return false; }
+ bool isRestrict() const { return false; }
+ bool isCoherent() const { return false; }
+ bool isVolatile() const { return false; }
+ bool isSample() const { return false; }
+ bool isMemory() const { return false; }
+ bool isMemoryQualifierImageAndSSBOOnly() const { return false; }
+ bool bufferReferenceNeedsVulkanMemoryModel() const { return false; }
+ bool isInterpolation() const { return flat || smooth; }
+ bool isExplicitInterpolation() const { return false; }
+ bool isAuxiliary() const { return centroid; }
+ bool isPatch() const { return false; }
+ bool isNoContraction() const { return false; }
+ void setNoContraction() { }
+ bool isPervertexNV() const { return false; }
+#else
+ bool noContraction: 1; // prevent contraction and reassociation, e.g., for 'precise' keyword, and expressions it affects
bool nopersp : 1;
-#ifdef AMD_EXTENSIONS
bool explicitInterp : 1;
-#endif
-#ifdef NV_EXTENSIONS
bool pervertexNV : 1;
bool perPrimitiveNV : 1;
bool perViewNV : 1;
bool perTaskNV : 1;
-#endif
bool patch : 1;
bool sample : 1;
+ bool restrict : 1;
+ bool readonly : 1;
+ bool writeonly : 1;
bool coherent : 1;
+ bool volatil : 1;
bool devicecoherent : 1;
bool queuefamilycoherent : 1;
bool workgroupcoherent : 1;
bool subgroupcoherent : 1;
bool nonprivate : 1;
- bool volatil : 1;
- bool restrict : 1;
- bool readonly : 1;
- bool writeonly : 1;
- bool specConstant : 1; // having a constant_id is not sufficient: expressions have no id, but are still specConstant
- bool nonUniform : 1;
-
+ bool isWriteOnly() const { return writeonly; }
+ bool isReadOnly() const { return readonly; }
+ bool isRestrict() const { return restrict; }
+ bool isCoherent() const { return coherent; }
+ bool isVolatile() const { return volatil; }
+ bool isSample() const { return sample; }
bool isMemory() const
{
return subgroupcoherent || workgroupcoherent || queuefamilycoherent || devicecoherent || coherent || volatil || restrict || readonly || writeonly || nonprivate;
@@ -548,31 +610,23 @@ public:
// include qualifiers that map to load/store availability/visibility/nonprivate memory access operands
return subgroupcoherent || workgroupcoherent || queuefamilycoherent || devicecoherent || coherent || nonprivate;
}
-
bool isInterpolation() const
{
-#ifdef AMD_EXTENSIONS
return flat || smooth || nopersp || explicitInterp;
-#else
- return flat || smooth || nopersp;
-#endif
}
-
-#ifdef AMD_EXTENSIONS
bool isExplicitInterpolation() const
{
return explicitInterp;
}
-#endif
-
bool isAuxiliary() const
{
-#ifdef NV_EXTENSIONS
return centroid || patch || sample || pervertexNV;
-#else
- return centroid || patch || sample;
-#endif
}
+ bool isPatch() const { return patch; }
+ bool isNoContraction() const { return noContraction; }
+ void setNoContraction() { noContraction = true; }
+ bool isPervertexNV() const { return pervertexNV; }
+#endif
bool isPipeInput() const
{
@@ -638,33 +692,6 @@ public:
}
}
- bool isPerPrimitive() const
- {
-#ifdef NV_EXTENSIONS
- return perPrimitiveNV;
-#else
- return false;
-#endif
- }
-
- bool isPerView() const
- {
-#ifdef NV_EXTENSIONS
- return perViewNV;
-#else
- return false;
-#endif
- }
-
- bool isTaskMemory() const
- {
-#ifdef NV_EXTENSIONS
- return perTaskNV;
-#else
- return false;
-#endif
- }
-
bool isIo() const
{
switch (storage) {
@@ -704,6 +731,15 @@ public:
}
}
+#ifdef GLSLANG_WEB
+ bool isPerView() const { return false; }
+ bool isTaskMemory() const { return false; }
+ bool isArrayedIo(EShLanguage language) const { return false; }
+#else
+ bool isPerPrimitive() const { return perPrimitiveNV; }
+ bool isPerView() const { return perViewNV; }
+ bool isTaskMemory() const { return perTaskNV; }
+
// True if this type of IO is supposed to be arrayed with extra level for per-vertex data
bool isArrayedIo(EShLanguage language) const
{
@@ -714,49 +750,50 @@ public:
return ! patch && (isPipeInput() || isPipeOutput());
case EShLangTessEvaluation:
return ! patch && isPipeInput();
-#ifdef NV_EXTENSIONS
case EShLangFragment:
return pervertexNV && isPipeInput();
case EShLangMeshNV:
return ! perTaskNV && isPipeOutput();
-#endif
default:
return false;
}
}
+#endif
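A worked reading of isArrayedIo(), which decides when an interface declaration gets the implicit extra array dimension for per-vertex data:

//   tess. control:  non-patch in/out  -> arrayed (e.g. gl_out[])
//   tess. eval:     non-patch inputs  -> arrayed
//   fragment:       only pervertexNV inputs (per-vertex barycentric data)
//   mesh (NV):      outputs, unless qualified perTaskNV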
// Implementing an embedded layout-qualifier class here, since C++ can't have a real class bitfield
void clearLayout() // all layout
{
clearUniformLayout();
+#ifndef GLSLANG_WEB
layoutPushConstant = false;
layoutBufferReference = false;
-#ifdef NV_EXTENSIONS
layoutPassthrough = false;
layoutViewportRelative = false;
// -2048 as the default value indicating layoutSecondaryViewportRelative is not set
layoutSecondaryViewportRelativeOffset = -2048;
layoutShaderRecordNV = false;
-#endif
-
layoutBufferReferenceAlign = layoutBufferReferenceAlignEnd;
+ layoutFormat = ElfNone;
+#endif
clearInterstageLayout();
layoutSpecConstantId = layoutSpecConstantIdEnd;
-
- layoutFormat = ElfNone;
}
void clearInterstageLayout()
{
layoutLocation = layoutLocationEnd;
layoutComponent = layoutComponentEnd;
+#ifndef GLSLANG_WEB
layoutIndex = layoutIndexEnd;
clearStreamLayout();
clearXfbLayout();
+#endif
}
+
+#ifndef GLSLANG_WEB
void clearStreamLayout()
{
layoutStream = layoutStreamEnd;
@@ -767,6 +804,7 @@ public:
layoutXfbStride = layoutXfbStrideEnd;
layoutXfbOffset = layoutXfbOffsetEnd;
}
+#endif
bool hasNonXfbLayout() const
{
@@ -774,11 +812,9 @@ public:
hasAnyLocation() ||
hasStream() ||
hasFormat() ||
-#ifdef NV_EXTENSIONS
- layoutShaderRecordNV ||
-#endif
- layoutPushConstant ||
- layoutBufferReference;
+ isShaderRecordNV() ||
+ isPushConstant() ||
+ hasBufferReference();
}
bool hasLayout() const
{
@@ -823,6 +859,7 @@ public:
unsigned int layoutSpecConstantId : 11;
static const unsigned int layoutSpecConstantIdEnd = 0x7FF;
+#ifndef GLSLANG_WEB
// stored as log2 of the actual alignment value
unsigned int layoutBufferReferenceAlign : 6;
static const unsigned int layoutBufferReferenceAlignEnd = 0x3F;
@@ -831,8 +868,6 @@ public:
bool layoutPushConstant;
bool layoutBufferReference;
-
-#ifdef NV_EXTENSIONS
bool layoutPassthrough;
bool layoutViewportRelative;
int layoutSecondaryViewportRelativeOffset;
@@ -857,7 +892,9 @@ public:
layoutSet = layoutSetEnd;
layoutBinding = layoutBindingEnd;
+#ifndef GLSLANG_WEB
layoutAttachment = layoutAttachmentEnd;
+#endif
}
bool hasMatrix() const
@@ -868,10 +905,6 @@ public:
{
return layoutPacking != ElpNone;
}
- bool hasOffset() const
- {
- return layoutOffset != layoutNotSet;
- }
bool hasAlign() const
{
return layoutAlign != layoutNotSet;
@@ -886,14 +919,6 @@ public:
{
return layoutLocation != layoutLocationEnd;
}
- bool hasComponent() const
- {
- return layoutComponent != layoutComponentEnd;
- }
- bool hasIndex() const
- {
- return layoutIndex != layoutIndexEnd;
- }
bool hasSet() const
{
return layoutSet != layoutSetEnd;
@@ -902,6 +927,40 @@ public:
{
return layoutBinding != layoutBindingEnd;
}
+#ifdef GLSLANG_WEB
+ bool hasOffset() const { return false; }
+ bool isNonPerspective() const { return false; }
+ bool hasIndex() const { return false; }
+ unsigned getIndex() const { return 0; }
+ bool hasComponent() const { return false; }
+ bool hasStream() const { return false; }
+ bool hasFormat() const { return false; }
+ bool hasXfb() const { return false; }
+ bool hasXfbBuffer() const { return false; }
+ bool hasXfbStride() const { return false; }
+ bool hasXfbOffset() const { return false; }
+ bool hasAttachment() const { return false; }
+ TLayoutFormat getFormat() const { return ElfNone; }
+ bool isPushConstant() const { return false; }
+ bool isShaderRecordNV() const { return false; }
+ bool hasBufferReference() const { return false; }
+ bool hasBufferReferenceAlign() const { return false; }
+ bool isNonUniform() const { return false; }
+#else
+ bool hasOffset() const
+ {
+ return layoutOffset != layoutNotSet;
+ }
+ bool isNonPerspective() const { return nopersp; }
+ bool hasIndex() const
+ {
+ return layoutIndex != layoutIndexEnd;
+ }
+ unsigned getIndex() const { return layoutIndex; }
+ bool hasComponent() const
+ {
+ return layoutComponent != layoutComponentEnd;
+ }
bool hasStream() const
{
return layoutStream != layoutStreamEnd;
@@ -932,16 +991,25 @@ public:
{
return layoutAttachment != layoutAttachmentEnd;
}
+ TLayoutFormat getFormat() const { return layoutFormat; }
+ bool isPushConstant() const { return layoutPushConstant; }
+ bool isShaderRecordNV() const { return layoutShaderRecordNV; }
+ bool hasBufferReference() const { return layoutBufferReference; }
+ bool hasBufferReferenceAlign() const
+ {
+ return layoutBufferReferenceAlign != layoutBufferReferenceAlignEnd;
+ }
+ bool isNonUniform() const
+ {
+ return nonUniform;
+ }
+#endif
bool hasSpecConstantId() const
{
// Not the same thing as being a specialization constant, this
// is just whether or not it was declared with an ID.
return layoutSpecConstantId != layoutSpecConstantIdEnd;
}
- bool hasBufferReferenceAlign() const
- {
- return layoutBufferReferenceAlign != layoutBufferReferenceAlignEnd;
- }
bool isSpecConstant() const
{
// True if type is a specialization constant, whether or not it
@@ -949,10 +1017,6 @@ public:
// true front-end constant.
return specConstant;
}
- bool isNonUniform() const
- {
- return nonUniform;
- }
bool isFrontEndConstant() const
{
// True if the front-end knows the final constant value.
@@ -972,11 +1036,13 @@ public:
static const char* getLayoutPackingString(TLayoutPacking packing)
{
switch (packing) {
+ case ElpStd140: return "std140";
+#ifndef GLSLANG_WEB
case ElpPacked: return "packed";
case ElpShared: return "shared";
- case ElpStd140: return "std140";
case ElpStd430: return "std430";
case ElpScalar: return "scalar";
+#endif
default: return "none";
}
}
@@ -988,6 +1054,9 @@ public:
default: return "none";
}
}
+#ifdef GLSLANG_WEB
+ static const char* getLayoutFormatString(TLayoutFormat f) { return "none"; }
+#else
static const char* getLayoutFormatString(TLayoutFormat f)
{
switch (f) {
@@ -1110,6 +1179,19 @@ public:
default: return 0;
}
}
+ static const char* getInterlockOrderingString(TInterlockOrdering order)
+ {
+ switch (order) {
+ case EioPixelInterlockOrdered: return "pixel_interlock_ordered";
+ case EioPixelInterlockUnordered: return "pixel_interlock_unordered";
+ case EioSampleInterlockOrdered: return "sample_interlock_ordered";
+ case EioSampleInterlockUnordered: return "sample_interlock_unordered";
+ case EioShadingRateInterlockOrdered: return "shading_rate_interlock_ordered";
+ case EioShadingRateInterlockUnordered: return "shading_rate_interlock_unordered";
+ default: return "none";
+ }
+ }
+#endif
};
// Qualifiers that don't need to be keep per object. They have shader scope, not object scope.
@@ -1124,18 +1206,22 @@ struct TShaderQualifiers {
TVertexOrder order;
bool pointMode;
int localSize[3]; // compute shader
+ bool localSizeNotDefault[3]; // compute shader
int localSizeSpecId[3]; // compute shader specialization id for gl_WorkGroupSize
+#ifndef GLSLANG_WEB
bool earlyFragmentTests; // fragment input
bool postDepthCoverage; // fragment input
TLayoutDepth layoutDepth;
bool blendEquation; // true if any blend equation was specified
int numViews; // multiview extensions
-
-#ifdef NV_EXTENSIONS
+ TInterlockOrdering interlockOrdering;
bool layoutOverrideCoverage; // true if layout override_coverage set
bool layoutDerivativeGroupQuads; // true if layout derivative_group_quadsNV set
bool layoutDerivativeGroupLinear; // true if layout derivative_group_linearNV set
int primitives; // mesh shader "max_primitives"
+ TLayoutDepth getDepth() const { return layoutDepth; }
+#else
+ TLayoutDepth getDepth() const { return EldNone; }
#endif
void init()
@@ -1151,22 +1237,32 @@ struct TShaderQualifiers {
localSize[0] = 1;
localSize[1] = 1;
localSize[2] = 1;
+ localSizeNotDefault[0] = false;
+ localSizeNotDefault[1] = false;
+ localSizeNotDefault[2] = false;
localSizeSpecId[0] = TQualifier::layoutNotSet;
localSizeSpecId[1] = TQualifier::layoutNotSet;
localSizeSpecId[2] = TQualifier::layoutNotSet;
+#ifndef GLSLANG_WEB
earlyFragmentTests = false;
postDepthCoverage = false;
layoutDepth = EldNone;
blendEquation = false;
numViews = TQualifier::layoutNotSet;
-#ifdef NV_EXTENSIONS
layoutOverrideCoverage = false;
layoutDerivativeGroupQuads = false;
layoutDerivativeGroupLinear = false;
primitives = TQualifier::layoutNotSet;
+ interlockOrdering = EioNone;
#endif
}
+#ifdef GLSLANG_WEB
+ bool hasBlendEquation() const { return false; }
+#else
+ bool hasBlendEquation() const { return blendEquation; }
+#endif
+
// Merge in characteristics from the 'src' qualifier. They can override when
// set, but never erase when not set.
void merge(const TShaderQualifiers& src)
@@ -1192,9 +1288,13 @@ struct TShaderQualifiers {
localSize[i] = src.localSize[i];
}
for (int i = 0; i < 3; ++i) {
+ localSizeNotDefault[i] = src.localSizeNotDefault[i] || localSizeNotDefault[i];
+ }
+ for (int i = 0; i < 3; ++i) {
if (src.localSizeSpecId[i] != TQualifier::layoutNotSet)
localSizeSpecId[i] = src.localSizeSpecId[i];
}
+#ifndef GLSLANG_WEB
if (src.earlyFragmentTests)
earlyFragmentTests = true;
if (src.postDepthCoverage)
@@ -1205,7 +1305,6 @@ struct TShaderQualifiers {
blendEquation = src.blendEquation;
if (src.numViews != TQualifier::layoutNotSet)
numViews = src.numViews;
-#ifdef NV_EXTENSIONS
if (src.layoutOverrideCoverage)
layoutOverrideCoverage = src.layoutOverrideCoverage;
if (src.layoutDerivativeGroupQuads)
@@ -1214,6 +1313,8 @@ struct TShaderQualifiers {
layoutDerivativeGroupLinear = src.layoutDerivativeGroupLinear;
if (src.primitives != TQualifier::layoutNotSet)
primitives = src.primitives;
+ if (src.interlockOrdering != EioNone)
+ interlockOrdering = src.interlockOrdering;
#endif
}
};
@@ -1241,6 +1342,12 @@ public:
TSourceLoc loc;
TArraySizes* typeParameters;
+#ifdef GLSLANG_WEB
+ bool isCoopmat() const { return false; }
+#else
+ bool isCoopmat() const { return coopmat; }
+#endif
+
void initType(const TSourceLoc& l)
{
basicType = EbtVoid;
@@ -1344,11 +1451,18 @@ public:
}
typeName = NewPoolTString(p.userDef->getTypeName().c_str());
}
- if (p.coopmat && p.basicType == EbtFloat &&
- p.typeParameters && p.typeParameters->getNumDims() > 0 &&
- p.typeParameters->getDimSize(0) == 16) {
- basicType = EbtFloat16;
- qualifier.precision = EpqNone;
+ if (p.isCoopmat() && p.typeParameters && p.typeParameters->getNumDims() > 0) {
+ int numBits = p.typeParameters->getDimSize(0);
+ if (p.basicType == EbtFloat && numBits == 16) {
+ basicType = EbtFloat16;
+ qualifier.precision = EpqNone;
+ } else if (p.basicType == EbtUint && numBits == 8) {
+ basicType = EbtUint8;
+ qualifier.precision = EpqNone;
+ } else if (p.basicType == EbtInt && numBits == 8) {
+ basicType = EbtInt8;
+ qualifier.precision = EpqNone;
+ }
}
}
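The generalized branch maps a cooperative-matrix bit-width parameter onto a sized component type. Assuming the GL_NV_cooperative_matrix / GL_NV_integer_cooperative_matrix declaration syntax, the correspondence is:

//   fcoopmatNV<16, gl_ScopeSubgroup, M, N>  ->  EbtFloat, numBits 16  ->  EbtFloat16
//   ucoopmatNV<8,  gl_ScopeSubgroup, M, N>  ->  EbtUint,  numBits 8   ->  EbtUint8
//   icoopmatNV<8,  gl_ScopeSubgroup, M, N>  ->  EbtInt,   numBits 8   ->  EbtInt8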
// for construction of sampler types
@@ -1453,7 +1567,7 @@ public:
referentType = copyOf.referentType;
}
typeParameters = copyOf.typeParameters;
- coopmat = copyOf.coopmat;
+ coopmat = copyOf.isCoopMat();
}
// Make complete copy of the whole type graph rooted at 'copyOf'.
@@ -1512,7 +1626,11 @@ public:
virtual int getOuterArraySize() const { return arraySizes->getOuterSize(); }
virtual TIntermTyped* getOuterArrayNode() const { return arraySizes->getOuterNode(); }
virtual int getCumulativeArraySize() const { return arraySizes->getCumulativeSize(); }
- virtual bool isArrayOfArrays() const { return arraySizes != nullptr && arraySizes->getNumDims() > 1; }
+#ifdef GLSLANG_WEB
+ bool isArrayOfArrays() const { return false; }
+#else
+ bool isArrayOfArrays() const { return arraySizes != nullptr && arraySizes->getNumDims() > 1; }
+#endif
virtual int getImplicitArraySize() const { return arraySizes->getImplicitSize(); }
virtual const TArraySizes* getArraySizes() const { return arraySizes; }
virtual TArraySizes* getArraySizes() { return arraySizes; }
@@ -1550,9 +1668,9 @@ public:
}
return false;
}
- virtual bool isOpaque() const { return basicType == EbtSampler || basicType == EbtAtomicUint
-#ifdef NV_EXTENSIONS
- || basicType == EbtAccStructNV
+ virtual bool isOpaque() const { return basicType == EbtSampler
+#ifndef GLSLANG_WEB
+ || basicType == EbtAtomicUint || basicType == EbtAccStructNV
#endif
; }
virtual bool isBuiltIn() const { return getQualifier().builtIn != EbvNone; }
@@ -1561,8 +1679,18 @@ public:
virtual bool isImage() const { return basicType == EbtSampler && getSampler().isImage(); }
virtual bool isSubpass() const { return basicType == EbtSampler && getSampler().isSubpass(); }
virtual bool isTexture() const { return basicType == EbtSampler && getSampler().isTexture(); }
+ // Check the block-name convention of creating a block without populating its members:
+ virtual bool isUnusableName() const { return isStruct() && structure == nullptr; }
virtual bool isParameterized() const { return typeParameters != nullptr; }
- virtual bool isCoopMat() const { return coopmat; }
+#ifdef GLSLANG_WEB
+ bool isAtomic() const { return false; }
+ bool isCoopMat() const { return false; }
+ bool isReference() const { return false; }
+#else
+ bool isAtomic() const { return basicType == EbtAtomicUint; }
+ bool isCoopMat() const { return coopmat; }
+ bool isReference() const { return getBasicType() == EbtReference; }
+#endif
// return true if this type contains any subtype which satisfies the given predicate.
template <typename P>
@@ -1643,20 +1771,44 @@ public:
return contains([](const TType* t) { return t->isArray() && t->arraySizes->isOuterSpecialization(); } );
}
- virtual bool contains16BitInt() const
+#ifdef GLSLANG_WEB
+ bool containsDouble() const { return false; }
+ bool contains16BitFloat() const { return false; }
+ bool contains64BitInt() const { return false; }
+ bool contains16BitInt() const { return false; }
+ bool contains8BitInt() const { return false; }
+ bool containsCoopMat() const { return false; }
+ bool containsReference() const { return false; }
+#else
+ bool containsDouble() const
+ {
+ return containsBasicType(EbtDouble);
+ }
+ bool contains16BitFloat() const
+ {
+ return containsBasicType(EbtFloat16);
+ }
+ bool contains64BitInt() const
+ {
+ return containsBasicType(EbtInt64) || containsBasicType(EbtUint64);
+ }
+ bool contains16BitInt() const
{
return containsBasicType(EbtInt16) || containsBasicType(EbtUint16);
}
-
- virtual bool contains8BitInt() const
+ bool contains8BitInt() const
{
return containsBasicType(EbtInt8) || containsBasicType(EbtUint8);
}
-
- virtual bool containsCoopMat() const
+ bool containsCoopMat() const
{
return contains([](const TType* t) { return t->coopmat; } );
}
+ bool containsReference() const
+ {
+ return containsBasicType(EbtReference);
+ }
+#endif
// Array editing methods. Array descriptors can be shared across
// type instances. This allows all uses of the same array
@@ -1716,11 +1868,9 @@ public:
{
if (isUnsizedArray() && !(skipNonvariablyIndexed || isArrayVariablyIndexed()))
changeOuterArraySize(getImplicitArraySize());
-#ifdef NV_EXTENSIONS
// For multi-dim per-view arrays, set unsized inner dimension size to 1
if (qualifier.isPerView() && arraySizes && arraySizes->isInnerUnsized())
arraySizes->clearInnerUnsized();
-#endif
if (isStruct() && structure->size() > 0) {
int lastMember = (int)structure->size() - 1;
for (int i = 0; i < lastMember; ++i)
@@ -1778,31 +1928,38 @@ public:
static const char* getBasicString(TBasicType t)
{
switch (t) {
- case EbtVoid: return "void";
case EbtFloat: return "float";
+ case EbtInt: return "int";
+ case EbtUint: return "uint";
+ case EbtSampler: return "sampler/image";
+#ifndef GLSLANG_WEB
+ case EbtVoid: return "void";
case EbtDouble: return "double";
case EbtFloat16: return "float16_t";
case EbtInt8: return "int8_t";
case EbtUint8: return "uint8_t";
case EbtInt16: return "int16_t";
case EbtUint16: return "uint16_t";
- case EbtInt: return "int";
- case EbtUint: return "uint";
case EbtInt64: return "int64_t";
case EbtUint64: return "uint64_t";
case EbtBool: return "bool";
case EbtAtomicUint: return "atomic_uint";
- case EbtSampler: return "sampler/image";
case EbtStruct: return "structure";
case EbtBlock: return "block";
-#ifdef NV_EXTENSIONS
case EbtAccStructNV: return "accelerationStructureNV";
-#endif
case EbtReference: return "reference";
+#endif
default: return "unknown type";
}
}
+#ifdef GLSLANG_WEB
+ TString getCompleteString() const { return ""; }
+ const char* getStorageQualifierString() const { return ""; }
+ const char* getBuiltInVariableString() const { return ""; }
+ const char* getPrecisionQualifierString() const { return ""; }
+ TString getBasicTypeString() const { return ""; }
+#else
TString getCompleteString() const
{
TString typeString;
@@ -1891,7 +2048,6 @@ public:
appendUint(1u << qualifier.layoutBufferReferenceAlign);
}
-#ifdef NV_EXTENSIONS
if (qualifier.layoutPassthrough)
appendStr(" passthrough");
if (qualifier.layoutViewportRelative)
@@ -1902,7 +2058,6 @@ public:
}
if (qualifier.layoutShaderRecordNV)
appendStr(" shaderRecordNV");
-#endif
appendStr(")");
}
@@ -1920,11 +2075,8 @@ public:
appendStr(" flat");
if (qualifier.nopersp)
appendStr(" noperspective");
-#ifdef AMD_EXTENSIONS
if (qualifier.explicitInterp)
appendStr(" __explicitInterpAMD");
-#endif
-#ifdef NV_EXTENSIONS
if (qualifier.pervertexNV)
appendStr(" pervertexNV");
if (qualifier.perPrimitiveNV)
@@ -1933,7 +2085,6 @@ public:
appendStr(" perviewNV");
if (qualifier.perTaskNV)
appendStr(" taskNV");
-#endif
if (qualifier.patch)
appendStr(" patch");
if (qualifier.sample)
@@ -2020,13 +2171,15 @@ public:
// Add struct/block members
if (isStruct() && structure) {
appendStr("{");
+ bool hasHiddenMember = true;
for (size_t i = 0; i < structure->size(); ++i) {
if (! (*structure)[i].type->hiddenMember()) {
+ if (!hasHiddenMember)
+ appendStr(", ");
typeString.append((*structure)[i].type->getCompleteString());
typeString.append(" ");
typeString.append((*structure)[i].type->getFieldName());
- if (i < structure->size() - 1)
- appendStr(", ");
+ hasHiddenMember = false;
}
}
appendStr("}");
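The rewritten loop emits the separator before each visible member rather than after, so hidden members no longer leave a dangling comma. For a struct whose trailing member is hidden:

//   old: "{int a, float b, }"   trailing ", " when the last member is hidden
//   new: "{int a, float b}"     separator only between emitted members
//   (hasHiddenMember effectively tracks "no visible member emitted yet")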
@@ -2046,10 +2199,13 @@ public:
const char* getStorageQualifierString() const { return GetStorageQualifierString(qualifier.storage); }
const char* getBuiltInVariableString() const { return GetBuiltInVariableString(qualifier.builtIn); }
const char* getPrecisionQualifierString() const { return GetPrecisionQualifierString(qualifier.precision); }
+#endif
+
const TTypeList* getStruct() const { assert(isStruct()); return structure; }
void setStruct(TTypeList* s) { assert(isStruct()); structure = s; }
TTypeList* getWritableStruct() const { assert(isStruct()); return structure; } // This should only be used when known to not be sharing with other threads
-
+ void setBasicType(const TBasicType& t) { basicType = t; }
+
int computeNumComponents() const
{
int components = 0;
@@ -2112,12 +2268,12 @@ public:
return true;
}
- bool sameReferenceType(const TType& right) const
+ bool sameReferenceType(const TType& right) const
{
- if ((basicType == EbtReference) != (right.basicType == EbtReference))
+ if (isReference() != right.isReference())
return false;
- if ((basicType != EbtReference) && (right.basicType != EbtReference))
+ if (!isReference() && !right.isReference())
return true;
assert(referentType != nullptr);
@@ -2129,7 +2285,7 @@ public:
return *referentType == *right.referentType;
}
- // See if two types match, in all aspects except arrayness
+ // See if two types match, in all aspects except arrayness
bool sameElementType(const TType& right) const
{
return basicType == right.basicType && sameElementShape(right);
@@ -2164,7 +2320,7 @@ public:
matrixCols == right.matrixCols &&
matrixRows == right.matrixRows &&
vector1 == right.vector1 &&
- coopmat == right.coopmat &&
+ isCoopMat() == right.isCoopMat() &&
sameStructType(right) &&
sameReferenceType(right);
}
@@ -2173,10 +2329,24 @@ public:
// an OK function parameter
bool coopMatParameterOK(const TType& right) const
{
- return coopmat && right.coopmat &&
+ return isCoopMat() && right.isCoopMat() && (getBasicType() == right.getBasicType()) &&
typeParameters == nullptr && right.typeParameters != nullptr;
}
+ bool sameCoopMatBaseType(const TType &right) const {
+ bool rv = coopmat && right.coopmat;
+ if (getBasicType() == EbtFloat || getBasicType() == EbtFloat16)
+ rv = rv && (right.getBasicType() == EbtFloat || right.getBasicType() == EbtFloat16);
+ else if (getBasicType() == EbtUint || getBasicType() == EbtUint8)
+ rv = rv && (right.getBasicType() == EbtUint || right.getBasicType() == EbtUint8);
+ else if (getBasicType() == EbtInt || getBasicType() == EbtInt8)
+ rv = rv && (right.getBasicType() == EbtInt || right.getBasicType() == EbtInt8);
+ else
+ rv = false;
+ return rv;
+ }
+
+
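Spot checks for sameCoopMatBaseType(), which treats a component type and its narrowed form as the same base:

//   float vs float16_t components -> compatible (EbtFloat ~ EbtFloat16)
//   uint  vs uint8_t   components -> compatible (EbtUint  ~ EbtUint8)
//   int   vs int8_t    components -> compatible (EbtInt   ~ EbtInt8)
//   float vs any integer component type -> not compatible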
// See if two types match in all ways (just the actual type, not qualification)
bool operator==(const TType& right) const
{
@@ -2190,12 +2360,13 @@ public:
unsigned int getBufferReferenceAlignment() const
{
+#ifndef GLSLANG_WEB
if (getBasicType() == glslang::EbtReference) {
return getReferentType()->getQualifier().hasBufferReferenceAlign() ?
(1u << getReferentType()->getQualifier().layoutBufferReferenceAlign) : 16u;
- } else {
- return 0;
}
+#endif
+ return 0;
}
protected:
diff --git a/thirdparty/glslang/glslang/Include/intermediate.h b/thirdparty/glslang/glslang/Include/intermediate.h
index 89d1954959..29d58ca635 100644
--- a/thirdparty/glslang/glslang/Include/intermediate.h
+++ b/thirdparty/glslang/glslang/Include/intermediate.h
@@ -275,6 +275,10 @@ enum TOperator {
EOpConvUint64ToPtr,
EOpConvPtrToUint64,
+ // uvec2 <-> pointer
+ EOpConvUvec2ToPtr,
+ EOpConvPtrToUvec2,
+
//
// binary operations
//
@@ -422,11 +426,9 @@ enum TOperator {
EOpReflect,
EOpRefract,
-#ifdef AMD_EXTENSIONS
EOpMin3,
EOpMax3,
EOpMid3,
-#endif
EOpDPdx, // Fragment only
EOpDPdy, // Fragment only
@@ -441,10 +443,7 @@ enum TOperator {
EOpInterpolateAtCentroid, // Fragment only
EOpInterpolateAtSample, // Fragment only
EOpInterpolateAtOffset, // Fragment only
-
-#ifdef AMD_EXTENSIONS
EOpInterpolateAtVertex,
-#endif
EOpMatrixTimesMatrix,
EOpOuterProduct,
@@ -534,7 +533,6 @@ enum TOperator {
EOpSubgroupQuadSwapVertical,
EOpSubgroupQuadSwapDiagonal,
-#ifdef NV_EXTENSIONS
EOpSubgroupPartition,
EOpSubgroupPartitionedAdd,
EOpSubgroupPartitionedMul,
@@ -557,11 +555,9 @@ enum TOperator {
EOpSubgroupPartitionedExclusiveAnd,
EOpSubgroupPartitionedExclusiveOr,
EOpSubgroupPartitionedExclusiveXor,
-#endif
EOpSubgroupGuardStop,
-#ifdef AMD_EXTENSIONS
EOpMinInvocations,
EOpMaxInvocations,
EOpAddInvocations,
@@ -588,7 +584,6 @@ enum TOperator {
EOpCubeFaceIndex,
EOpCubeFaceCoord,
EOpTime,
-#endif
EOpAtomicAdd,
EOpAtomicMin,
@@ -621,6 +616,11 @@ enum TOperator {
EOpCooperativeMatrixStore,
EOpCooperativeMatrixMulAdd,
+ EOpBeginInvocationInterlock, // Fragment only
+ EOpEndInvocationInterlock, // Fragment only
+
+ EOpIsHelperInvocation,
+
//
// Branch
//
@@ -631,6 +631,7 @@ enum TOperator {
EOpContinue,
EOpCase,
EOpDefault,
+ EOpDemote, // Fragment only
//
// Constructors
@@ -648,9 +649,21 @@ enum TOperator {
EOpConstructBool,
EOpConstructFloat,
EOpConstructDouble,
+ // Keep vector and matrix constructors in a consistent relative order for
+ // TParseContext::constructBuiltIn, which converts between 8/16/32 bit
+ // vector constructors
EOpConstructVec2,
EOpConstructVec3,
EOpConstructVec4,
+ EOpConstructMat2x2,
+ EOpConstructMat2x3,
+ EOpConstructMat2x4,
+ EOpConstructMat3x2,
+ EOpConstructMat3x3,
+ EOpConstructMat3x4,
+ EOpConstructMat4x2,
+ EOpConstructMat4x3,
+ EOpConstructMat4x4,
EOpConstructDVec2,
EOpConstructDVec3,
EOpConstructDVec4,
@@ -681,15 +694,6 @@ enum TOperator {
EOpConstructU64Vec2,
EOpConstructU64Vec3,
EOpConstructU64Vec4,
- EOpConstructMat2x2,
- EOpConstructMat2x3,
- EOpConstructMat2x4,
- EOpConstructMat3x2,
- EOpConstructMat3x3,
- EOpConstructMat3x4,
- EOpConstructMat4x2,
- EOpConstructMat4x3,
- EOpConstructMat4x4,
EOpConstructDMat2x2,
EOpConstructDMat2x3,
EOpConstructDMat2x4,
@@ -786,10 +790,8 @@ enum TOperator {
EOpImageQuerySamples,
EOpImageLoad,
EOpImageStore,
-#ifdef AMD_EXTENSIONS
EOpImageLoadLod,
EOpImageStoreLod,
-#endif
EOpImageAtomicAdd,
EOpImageAtomicMin,
EOpImageAtomicMax,
@@ -804,9 +806,7 @@ enum TOperator {
EOpSubpassLoad,
EOpSubpassLoadMS,
EOpSparseImageLoad,
-#ifdef AMD_EXTENSIONS
EOpSparseImageLoadLod,
-#endif
EOpImageGuardEnd,
@@ -844,13 +844,11 @@ enum TOperator {
EOpTextureOffsetClamp,
EOpTextureGradClamp,
EOpTextureGradOffsetClamp,
-#ifdef AMD_EXTENSIONS
EOpTextureGatherLod,
EOpTextureGatherLodOffset,
EOpTextureGatherLodOffsets,
EOpFragmentMaskFetch,
EOpFragmentFetch,
-#endif
EOpSparseTextureGuardBegin,
@@ -870,15 +868,12 @@ enum TOperator {
EOpSparseTextureOffsetClamp,
EOpSparseTextureGradClamp,
EOpSparseTextureGradOffsetClamp,
-#ifdef AMD_EXTENSIONS
EOpSparseTextureGatherLod,
EOpSparseTextureGatherLodOffset,
EOpSparseTextureGatherLodOffsets,
-#endif
EOpSparseTextureGuardEnd,
-#ifdef NV_EXTENSIONS
EOpImageFootprintGuardBegin,
EOpImageSampleFootprintNV,
EOpImageSampleFootprintClampNV,
@@ -886,7 +881,6 @@ enum TOperator {
EOpImageSampleFootprintGradNV,
EOpImageSampleFootprintGradClampNV,
EOpImageFootprintGuardEnd,
-#endif
EOpSamplingGuardEnd,
EOpTextureGuardEnd,
@@ -905,14 +899,21 @@ enum TOperator {
EOpFindLSB,
EOpFindMSB,
-#ifdef NV_EXTENSIONS
+ EOpCountLeadingZeros,
+ EOpCountTrailingZeros,
+ EOpAbsDifference,
+ EOpAddSaturate,
+ EOpSubSaturate,
+ EOpAverage,
+ EOpAverageRounded,
+ EOpMul32x16,
+
EOpTraceNV,
EOpReportIntersectionNV,
EOpIgnoreIntersectionNV,
EOpTerminateRayNV,
EOpExecuteCallableNV,
EOpWritePackedPrimitiveIndices4x8NV,
-#endif
//
// HLSL operations
//
@@ -996,6 +997,10 @@ enum TOperator {
EOpWaveGetLaneIndex, // Will decompose to gl_SubgroupInvocationID.
EOpWaveActiveCountBits, // Will decompose to subgroupBallotBitCount(subgroupBallot()).
EOpWavePrefixCountBits, // Will decompose to subgroupBallotInclusiveBitCount(subgroupBallot()).
+
+ // Shader Clock Ops
+ EOpReadClockSubgroupKHR,
+ EOpReadClockDeviceKHR,
};
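The new operators in this hunk track recently promoted GLSL functionality; mapping them (hedged) to the builtins they appear to back:

//   EOpConvUvec2ToPtr / EOpConvPtrToUvec2   -> GL_EXT_buffer_reference_uvec2 casts
//   EOpCountLeadingZeros ... EOpMul32x16    -> GL_INTEL_shader_integer_functions2
//                                              (countLeadingZeros(), addSaturate(), ...)
//   EOpBeginInvocationInterlock / EOpEnd... -> beginInvocationInterlockARB() etc.
//   EOpDemote / EOpIsHelperInvocation       -> demote; helperInvocationEXT()
//                                              (GL_EXT_demote_to_helper_invocation)
//   EOpReadClockSubgroupKHR / ...DeviceKHR  -> clockARB()/clock2x32ARB(),
//                                              clockRealtimeEXT()/clockRealtime2x32EXT()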
class TIntermTraverser;
@@ -1097,6 +1102,8 @@ public:
virtual bool isStruct() const { return type.isStruct(); }
virtual bool isFloatingDomain() const { return type.isFloatingDomain(); }
virtual bool isIntegerDomain() const { return type.isIntegerDomain(); }
+ bool isAtomic() const { return type.isAtomic(); }
+ bool isReference() const { return type.isReference(); }
TString getCompleteString() const { return type.getCompleteString(); }
protected:
@@ -1191,6 +1198,7 @@ public:
virtual void traverse(TIntermTraverser*);
TOperator getFlowOp() const { return flowOp; }
TIntermTyped* getExpression() const { return expression; }
+ void setExpression(TIntermTyped* pExpression) { expression = pExpression; }
protected:
TOperator flowOp;
TIntermTyped* expression;
@@ -1224,7 +1232,7 @@ public:
// it is essential to use "symbol = sym" to assign to symbol
TIntermSymbol(int i, const TString& n, const TType& t)
: TIntermTyped(t), id(i),
-#ifdef ENABLE_HLSL
+#ifndef GLSLANG_WEB
flattenSubset(-1),
#endif
constSubtree(nullptr)
@@ -1239,7 +1247,7 @@ public:
const TConstUnionArray& getConstArray() const { return constArray; }
void setConstSubtree(TIntermTyped* subtree) { constSubtree = subtree; }
TIntermTyped* getConstSubtree() const { return constSubtree; }
-#ifdef ENABLE_HLSL
+#ifndef GLSLANG_WEB
void setFlattenSubset(int subset) { flattenSubset = subset; }
int getFlattenSubset() const { return flattenSubset; } // -1 means full object
#endif
@@ -1250,7 +1258,7 @@ public:
protected:
int id; // the unique id of the symbol this node represents
-#ifdef ENABLE_HLSL
+#ifndef GLSLANG_WEB
int flattenSubset; // how deeply the flattened object rooted at id has been dereferenced
#endif
TString name; // the name of the symbol this node represents
@@ -1290,9 +1298,7 @@ struct TCrackedTextureOp {
bool grad;
bool subpass;
bool lodClamp;
-#ifdef AMD_EXTENSIONS
bool fragMask;
-#endif
};
//
@@ -1308,12 +1314,19 @@ public:
bool isConstructor() const;
bool isTexture() const { return op > EOpTextureGuardBegin && op < EOpTextureGuardEnd; }
bool isSampling() const { return op > EOpSamplingGuardBegin && op < EOpSamplingGuardEnd; }
+#ifdef GLSLANG_WEB
+ bool isImage() const { return false; }
+ bool isSparseTexture() const { return false; }
+ bool isImageFootprint() const { return false; }
+ bool isSparseImage() const { return false; }
+ bool isSubgroup() const { return false; }
+#else
bool isImage() const { return op > EOpImageGuardBegin && op < EOpImageGuardEnd; }
bool isSparseTexture() const { return op > EOpSparseTextureGuardBegin && op < EOpSparseTextureGuardEnd; }
-#ifdef NV_EXTENSIONS
bool isImageFootprint() const { return op > EOpImageFootprintGuardBegin && op < EOpImageFootprintGuardEnd; }
-#endif
bool isSparseImage() const { return op == EOpSparseImageLoad; }
+ bool isSubgroup() const { return op > EOpSubgroupGuardStart && op < EOpSubgroupGuardStop; }
+#endif
void setOperationPrecision(TPrecisionQualifier p) { operationPrecision = p; }
TPrecisionQualifier getOperationPrecision() const { return operationPrecision != EpqNone ?
@@ -1343,9 +1356,7 @@ public:
cracked.grad = false;
cracked.subpass = false;
cracked.lodClamp = false;
-#ifdef AMD_EXTENSIONS
cracked.fragMask = false;
-#endif
switch (op) {
case EOpImageQuerySize:
@@ -1360,10 +1371,6 @@ public:
case EOpTexture:
case EOpSparseTexture:
break;
- case EOpTextureClamp:
- case EOpSparseTextureClamp:
- cracked.lodClamp = true;
- break;
case EOpTextureProj:
cracked.proj = true;
break;
@@ -1375,22 +1382,17 @@ public:
case EOpSparseTextureOffset:
cracked.offset = true;
break;
- case EOpTextureOffsetClamp:
- case EOpSparseTextureOffsetClamp:
- cracked.offset = true;
- cracked.lodClamp = true;
- break;
case EOpTextureFetch:
case EOpSparseTextureFetch:
cracked.fetch = true;
- if (sampler.dim == Esd1D || (sampler.dim == Esd2D && ! sampler.ms) || sampler.dim == Esd3D)
+ if (sampler.is1D() || (sampler.dim == Esd2D && ! sampler.isMultiSample()) || sampler.dim == Esd3D)
cracked.lod = true;
break;
case EOpTextureFetchOffset:
case EOpSparseTextureFetchOffset:
cracked.fetch = true;
cracked.offset = true;
- if (sampler.dim == Esd1D || (sampler.dim == Esd2D && ! sampler.ms) || sampler.dim == Esd3D)
+ if (sampler.is1D() || (sampler.dim == Esd2D && ! sampler.isMultiSample()) || sampler.dim == Esd3D)
cracked.lod = true;
break;
case EOpTextureProjOffset:
@@ -1415,11 +1417,6 @@ public:
case EOpSparseTextureGrad:
cracked.grad = true;
break;
- case EOpTextureGradClamp:
- case EOpSparseTextureGradClamp:
- cracked.grad = true;
- cracked.lodClamp = true;
- break;
case EOpTextureGradOffset:
case EOpSparseTextureGradOffset:
cracked.grad = true;
@@ -1434,6 +1431,21 @@ public:
cracked.offset = true;
cracked.proj = true;
break;
+#ifndef GLSLANG_WEB
+ case EOpTextureClamp:
+ case EOpSparseTextureClamp:
+ cracked.lodClamp = true;
+ break;
+ case EOpTextureOffsetClamp:
+ case EOpSparseTextureOffsetClamp:
+ cracked.offset = true;
+ cracked.lodClamp = true;
+ break;
+ case EOpTextureGradClamp:
+ case EOpSparseTextureGradClamp:
+ cracked.grad = true;
+ cracked.lodClamp = true;
+ break;
case EOpTextureGradOffsetClamp:
case EOpSparseTextureGradOffsetClamp:
cracked.grad = true;
@@ -1454,7 +1466,6 @@ public:
cracked.gather = true;
cracked.offsets = true;
break;
-#ifdef AMD_EXTENSIONS
case EOpTextureGatherLod:
case EOpSparseTextureGatherLod:
cracked.gather = true;
@@ -1485,8 +1496,6 @@ public:
cracked.subpass = sampler.dim == EsdSubpass;
cracked.fragMask = true;
break;
-#endif
-#ifdef NV_EXTENSIONS
case EOpImageSampleFootprintNV:
break;
case EOpImageSampleFootprintClampNV:
@@ -1502,11 +1511,11 @@ public:
cracked.lodClamp = true;
cracked.grad = true;
break;
-#endif
case EOpSubpassLoad:
case EOpSubpassLoadMS:
cracked.subpass = true;
break;
+#endif
default:
break;
}
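crackTexture() reduces a texturing opcode to orthogonal flags; two worked cases from the switch above:

//   EOpSparseTextureGradOffsetClamp -> cracked.grad = cracked.offset = cracked.lodClamp = true
//   EOpTextureFetch on a non-MS 2D sampler -> cracked.fetch = true and cracked.lod = true
//                                             (fetch takes an explicit LOD there)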
diff --git a/thirdparty/glslang/glslang/Include/revision.h b/thirdparty/glslang/glslang/Include/revision.h
index dd6c8da04f..a0e4b2066c 100644
--- a/thirdparty/glslang/glslang/Include/revision.h
+++ b/thirdparty/glslang/glslang/Include/revision.h
@@ -1,3 +1,3 @@
// This header is generated by the make-revision script.
-#define GLSLANG_PATCH_LEVEL 3226
+#define GLSLANG_PATCH_LEVEL 3559
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Constant.cpp b/thirdparty/glslang/glslang/MachineIndependent/Constant.cpp
index b75e3efb00..98c2666fbb 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Constant.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Constant.cpp
@@ -189,6 +189,24 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TIntermTyped* right
else
newConstArray[i].setDConst((double)NAN);
break;
+
+ case EbtInt:
+ if (rightUnionArray[i] == 0)
+ newConstArray[i].setIConst(0x7FFFFFFF);
+ else if (rightUnionArray[i].getIConst() == -1 && leftUnionArray[i].getIConst() == (int)-0x80000000ll)
+ newConstArray[i].setIConst((int)-0x80000000ll);
+ else
+ newConstArray[i].setIConst(leftUnionArray[i].getIConst() / rightUnionArray[i].getIConst());
+ break;
+
+ case EbtUint:
+ if (rightUnionArray[i] == 0u)
+ newConstArray[i].setUConst(0xFFFFFFFFu);
+ else
+ newConstArray[i].setUConst(leftUnionArray[i].getUConst() / rightUnionArray[i].getUConst());
+ break;
+
+#ifndef GLSLANG_WEB
case EbtInt8:
if (rightUnionArray[i] == (signed char)0)
newConstArray[i].setI8Const((signed char)0x7F);
@@ -221,22 +239,6 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TIntermTyped* right
newConstArray[i].setU16Const(leftUnionArray[i].getU16Const() / rightUnionArray[i].getU16Const());
break;
- case EbtInt:
- if (rightUnionArray[i] == 0)
- newConstArray[i].setIConst(0x7FFFFFFF);
- else if (rightUnionArray[i].getIConst() == -1 && leftUnionArray[i].getIConst() == (int)-0x80000000ll)
- newConstArray[i].setIConst((int)-0x80000000ll);
- else
- newConstArray[i].setIConst(leftUnionArray[i].getIConst() / rightUnionArray[i].getIConst());
- break;
-
- case EbtUint:
- if (rightUnionArray[i] == 0u)
- newConstArray[i].setUConst(0xFFFFFFFFu);
- else
- newConstArray[i].setUConst(leftUnionArray[i].getUConst() / rightUnionArray[i].getUConst());
- break;
-
case EbtInt64:
if (rightUnionArray[i] == 0ll)
newConstArray[i].setI64Const(0x7FFFFFFFFFFFFFFFll);
@@ -254,6 +256,7 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TIntermTyped* right
break;
default:
return 0;
+#endif
}
}
break;
@@ -292,13 +295,12 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TIntermTyped* right
newConstArray[i].setIConst(0);
break;
} else goto modulo_default;
-
+#ifndef GLSLANG_WEB
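+            // As with division, LLONG_MIN % -1 (and SHRT_MIN % -1 below) would be
+            // undefined behavior at fold time, so those cases fold explicitly to 0.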
case EbtInt64:
if (rightUnionArray[i].getI64Const() == -1 && leftUnionArray[i].getI64Const() == LLONG_MIN) {
newConstArray[i].setI64Const(0);
break;
} else goto modulo_default;
-#ifdef AMD_EXTENSIONS
case EbtInt16:
if (rightUnionArray[i].getIConst() == -1 && leftUnionArray[i].getIConst() == SHRT_MIN) {
newConstArray[i].setIConst(0);
@@ -415,8 +417,8 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
case EOpEmitStreamVertex:
case EOpEndStreamPrimitive:
- // These don't actually fold
- return 0;
+ // These don't fold
+ return nullptr;
case EOpPackSnorm2x16:
case EOpPackUnorm2x16:
@@ -491,8 +493,6 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
break;
}
- // TODO: 3.0 Functionality: unary constant folding: the rest of the ops have to be fleshed out
-
case EOpPackSnorm2x16:
case EOpPackUnorm2x16:
case EOpPackHalf2x16:
@@ -510,7 +510,7 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
case EOpDeterminant:
case EOpMatrixInverse:
case EOpTranspose:
- return 0;
+ return nullptr;
default:
assert(componentWise);
@@ -529,16 +529,18 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
case EbtDouble:
case EbtFloat16:
case EbtFloat: newConstArray[i].setDConst(-unionArray[i].getDConst()); break;
+ case EbtInt: newConstArray[i].setIConst(-unionArray[i].getIConst()); break;
+ case EbtUint: newConstArray[i].setUConst(static_cast<unsigned int>(-static_cast<int>(unionArray[i].getUConst()))); break;
+#ifndef GLSLANG_WEB
case EbtInt8: newConstArray[i].setI8Const(-unionArray[i].getI8Const()); break;
case EbtUint8: newConstArray[i].setU8Const(static_cast<unsigned int>(-static_cast<signed int>(unionArray[i].getU8Const()))); break;
case EbtInt16: newConstArray[i].setI16Const(-unionArray[i].getI16Const()); break;
case EbtUint16:newConstArray[i].setU16Const(static_cast<unsigned int>(-static_cast<signed int>(unionArray[i].getU16Const()))); break;
- case EbtInt: newConstArray[i].setIConst(-unionArray[i].getIConst()); break;
- case EbtUint: newConstArray[i].setUConst(static_cast<unsigned int>(-static_cast<int>(unionArray[i].getUConst()))); break;
case EbtInt64: newConstArray[i].setI64Const(-unionArray[i].getI64Const()); break;
case EbtUint64: newConstArray[i].setU64Const(static_cast<unsigned long long>(-static_cast<long long>(unionArray[i].getU64Const()))); break;
+#endif
default:
- return 0;
+ return nullptr;
}
break;
case EOpLogicalNot:
@@ -546,7 +548,7 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
switch (getType().getBasicType()) {
case EbtBool: newConstArray[i].setBConst(!unionArray[i].getBConst()); break;
default:
- return 0;
+ return nullptr;
}
break;
case EOpBitwiseNot:
@@ -671,6 +673,48 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
break;
}
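+    // Constant folding for the 32-bit and bool/float conversions; these sit outside
+    // the GLSLANG_WEB guard below because the web build keeps the core types.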
+ case EOpConvIntToBool:
+ newConstArray[i].setBConst(unionArray[i].getIConst() != 0); break;
+ case EOpConvUintToBool:
+ newConstArray[i].setBConst(unionArray[i].getUConst() != 0); break;
+ case EOpConvBoolToInt:
+ newConstArray[i].setIConst(unionArray[i].getBConst()); break;
+ case EOpConvBoolToUint:
+ newConstArray[i].setUConst(unionArray[i].getBConst()); break;
+ case EOpConvIntToUint:
+ newConstArray[i].setUConst(unionArray[i].getIConst()); break;
+ case EOpConvUintToInt:
+ newConstArray[i].setIConst(unionArray[i].getUConst()); break;
+
+ case EOpConvFloatToBool:
+ case EOpConvDoubleToBool:
+ newConstArray[i].setBConst(unionArray[i].getDConst() != 0); break;
+
+ case EOpConvBoolToFloat:
+ case EOpConvBoolToDouble:
+ newConstArray[i].setDConst(unionArray[i].getBConst()); break;
+
+ case EOpConvIntToFloat:
+ case EOpConvIntToDouble:
+ newConstArray[i].setDConst(unionArray[i].getIConst()); break;
+
+ case EOpConvUintToFloat:
+ case EOpConvUintToDouble:
+ newConstArray[i].setDConst(unionArray[i].getUConst()); break;
+
+ case EOpConvDoubleToFloat:
+ case EOpConvFloatToDouble:
+ newConstArray[i].setDConst(unionArray[i].getDConst()); break;
+
+ case EOpConvFloatToUint:
+ case EOpConvDoubleToUint:
+ newConstArray[i].setUConst(static_cast<unsigned int>(unionArray[i].getDConst())); break;
+
+ case EOpConvFloatToInt:
+ case EOpConvDoubleToInt:
+ newConstArray[i].setIConst(static_cast<int>(unionArray[i].getDConst())); break;
+
+#ifndef GLSLANG_WEB
case EOpConvInt8ToBool:
newConstArray[i].setBConst(unionArray[i].getI8Const() != 0); break;
case EOpConvUint8ToBool:
@@ -679,20 +723,12 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
newConstArray[i].setBConst(unionArray[i].getI16Const() != 0); break;
case EOpConvUint16ToBool:
newConstArray[i].setBConst(unionArray[i].getU16Const() != 0); break;
- case EOpConvIntToBool:
- newConstArray[i].setBConst(unionArray[i].getIConst() != 0); break;
- case EOpConvUintToBool:
- newConstArray[i].setBConst(unionArray[i].getUConst() != 0); break;
case EOpConvInt64ToBool:
newConstArray[i].setBConst(unionArray[i].getI64Const() != 0); break;
case EOpConvUint64ToBool:
newConstArray[i].setBConst(unionArray[i].getI64Const() != 0); break;
case EOpConvFloat16ToBool:
newConstArray[i].setBConst(unionArray[i].getDConst() != 0); break;
- case EOpConvFloatToBool:
- newConstArray[i].setBConst(unionArray[i].getDConst() != 0); break;
- case EOpConvDoubleToBool:
- newConstArray[i].setBConst(unionArray[i].getDConst() != 0); break;
case EOpConvBoolToInt8:
newConstArray[i].setI8Const(unionArray[i].getBConst()); break;
@@ -702,20 +738,12 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
newConstArray[i].setI16Const(unionArray[i].getBConst()); break;
case EOpConvBoolToUint16:
newConstArray[i].setU16Const(unionArray[i].getBConst()); break;
- case EOpConvBoolToInt:
- newConstArray[i].setIConst(unionArray[i].getBConst()); break;
- case EOpConvBoolToUint:
- newConstArray[i].setUConst(unionArray[i].getBConst()); break;
case EOpConvBoolToInt64:
newConstArray[i].setI64Const(unionArray[i].getBConst()); break;
case EOpConvBoolToUint64:
newConstArray[i].setU64Const(unionArray[i].getBConst()); break;
case EOpConvBoolToFloat16:
newConstArray[i].setDConst(unionArray[i].getBConst()); break;
- case EOpConvBoolToFloat:
- newConstArray[i].setDConst(unionArray[i].getBConst()); break;
- case EOpConvBoolToDouble:
- newConstArray[i].setDConst(unionArray[i].getBConst()); break;
case EOpConvInt8ToInt16:
newConstArray[i].setI16Const(unionArray[i].getI8Const()); break;
@@ -810,8 +838,6 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
newConstArray[i].setU8Const((unsigned char)unionArray[i].getIConst()); break;
case EOpConvIntToUint16:
newConstArray[i].setU16Const((unsigned short)unionArray[i].getIConst()); break;
- case EOpConvIntToUint:
- newConstArray[i].setUConst(unionArray[i].getIConst()); break;
case EOpConvIntToUint64:
newConstArray[i].setU64Const(unionArray[i].getIConst()); break;
@@ -819,8 +845,6 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
newConstArray[i].setI8Const((signed char)unionArray[i].getUConst()); break;
case EOpConvUintToInt16:
newConstArray[i].setI16Const((signed short)unionArray[i].getUConst()); break;
- case EOpConvUintToInt:
- newConstArray[i].setIConst(unionArray[i].getUConst()); break;
case EOpConvUintToInt64:
newConstArray[i].setI64Const(unionArray[i].getUConst()); break;
case EOpConvUintToUint8:
@@ -831,16 +855,8 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
newConstArray[i].setU64Const(unionArray[i].getUConst()); break;
case EOpConvIntToFloat16:
newConstArray[i].setDConst(unionArray[i].getIConst()); break;
- case EOpConvIntToFloat:
- newConstArray[i].setDConst(unionArray[i].getIConst()); break;
- case EOpConvIntToDouble:
- newConstArray[i].setDConst(unionArray[i].getIConst()); break;
case EOpConvUintToFloat16:
newConstArray[i].setDConst(unionArray[i].getUConst()); break;
- case EOpConvUintToFloat:
- newConstArray[i].setDConst(unionArray[i].getUConst()); break;
- case EOpConvUintToDouble:
- newConstArray[i].setDConst(unionArray[i].getUConst()); break;
case EOpConvInt64ToInt8:
newConstArray[i].setI8Const(static_cast<signed char>(unionArray[i].getI64Const())); break;
case EOpConvInt64ToInt16:
@@ -905,48 +921,35 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
newConstArray[i].setI8Const(static_cast<signed char>(unionArray[i].getDConst())); break;
case EOpConvFloatToInt16:
newConstArray[i].setI16Const(static_cast<signed short>(unionArray[i].getDConst())); break;
- case EOpConvFloatToInt:
- newConstArray[i].setIConst(static_cast<int>(unionArray[i].getDConst())); break;
case EOpConvFloatToInt64:
newConstArray[i].setI64Const(static_cast<long long>(unionArray[i].getDConst())); break;
case EOpConvFloatToUint8:
newConstArray[i].setU8Const(static_cast<unsigned char>(unionArray[i].getDConst())); break;
case EOpConvFloatToUint16:
newConstArray[i].setU16Const(static_cast<unsigned short>(unionArray[i].getDConst())); break;
- case EOpConvFloatToUint:
- newConstArray[i].setUConst(static_cast<unsigned int>(unionArray[i].getDConst())); break;
case EOpConvFloatToUint64:
newConstArray[i].setU64Const(static_cast<unsigned long long>(unionArray[i].getDConst())); break;
case EOpConvFloatToFloat16:
newConstArray[i].setDConst(unionArray[i].getDConst()); break;
- case EOpConvFloatToDouble:
- newConstArray[i].setDConst(unionArray[i].getDConst()); break;
case EOpConvDoubleToInt8:
newConstArray[i].setI8Const(static_cast<signed char>(unionArray[i].getDConst())); break;
case EOpConvDoubleToInt16:
newConstArray[i].setI16Const(static_cast<signed short>(unionArray[i].getDConst())); break;
- case EOpConvDoubleToInt:
- newConstArray[i].setIConst(static_cast<int>(unionArray[i].getDConst())); break;
case EOpConvDoubleToInt64:
newConstArray[i].setI64Const(static_cast<long long>(unionArray[i].getDConst())); break;
case EOpConvDoubleToUint8:
newConstArray[i].setU8Const(static_cast<unsigned char>(unionArray[i].getDConst())); break;
case EOpConvDoubleToUint16:
newConstArray[i].setU16Const(static_cast<unsigned short>(unionArray[i].getDConst())); break;
- case EOpConvDoubleToUint:
- newConstArray[i].setUConst(static_cast<unsigned int>(unionArray[i].getDConst())); break;
case EOpConvDoubleToUint64:
newConstArray[i].setU64Const(static_cast<unsigned long long>(unionArray[i].getDConst())); break;
case EOpConvDoubleToFloat16:
newConstArray[i].setDConst(unionArray[i].getDConst()); break;
- case EOpConvDoubleToFloat:
- newConstArray[i].setDConst(unionArray[i].getDConst()); break;
case EOpConvPtrToUint64:
case EOpConvUint64ToPtr:
case EOpConstructReference:
newConstArray[i].setU64Const(unionArray[i].getU64Const()); break;
-
-
+#endif
// TODO: 3.0 Functionality: unary constant folding: the rest of the ops have to be fleshed out
@@ -970,7 +973,7 @@ TIntermTyped* TIntermConstantUnion::fold(TOperator op, const TType& returnType)
case EOpInt16BitsToFloat16:
case EOpUint16BitsToFloat16:
default:
- return 0;
+ return nullptr;
}
}
@@ -1078,6 +1081,13 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode)
case EbtDouble:
newConstArray[comp].setDConst(std::min(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst()));
break;
+ case EbtInt:
+ newConstArray[comp].setIConst(std::min(childConstUnions[0][arg0comp].getIConst(), childConstUnions[1][arg1comp].getIConst()));
+ break;
+ case EbtUint:
+ newConstArray[comp].setUConst(std::min(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst()));
+ break;
+#ifndef GLSLANG_WEB
case EbtInt8:
newConstArray[comp].setI8Const(std::min(childConstUnions[0][arg0comp].getI8Const(), childConstUnions[1][arg1comp].getI8Const()));
break;
@@ -1090,18 +1100,13 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode)
case EbtUint16:
newConstArray[comp].setU16Const(std::min(childConstUnions[0][arg0comp].getU16Const(), childConstUnions[1][arg1comp].getU16Const()));
break;
- case EbtInt:
- newConstArray[comp].setIConst(std::min(childConstUnions[0][arg0comp].getIConst(), childConstUnions[1][arg1comp].getIConst()));
- break;
- case EbtUint:
- newConstArray[comp].setUConst(std::min(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst()));
- break;
case EbtInt64:
newConstArray[comp].setI64Const(std::min(childConstUnions[0][arg0comp].getI64Const(), childConstUnions[1][arg1comp].getI64Const()));
break;
case EbtUint64:
newConstArray[comp].setU64Const(std::min(childConstUnions[0][arg0comp].getU64Const(), childConstUnions[1][arg1comp].getU64Const()));
break;
+#endif
default: assert(false && "Default missing");
}
break;
@@ -1112,6 +1117,13 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode)
case EbtDouble:
newConstArray[comp].setDConst(std::max(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst()));
break;
+ case EbtInt:
+ newConstArray[comp].setIConst(std::max(childConstUnions[0][arg0comp].getIConst(), childConstUnions[1][arg1comp].getIConst()));
+ break;
+ case EbtUint:
+ newConstArray[comp].setUConst(std::max(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst()));
+ break;
+#ifndef GLSLANG_WEB
case EbtInt8:
newConstArray[comp].setI8Const(std::max(childConstUnions[0][arg0comp].getI8Const(), childConstUnions[1][arg1comp].getI8Const()));
break;
@@ -1124,18 +1136,13 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode)
case EbtUint16:
newConstArray[comp].setU16Const(std::max(childConstUnions[0][arg0comp].getU16Const(), childConstUnions[1][arg1comp].getU16Const()));
break;
- case EbtInt:
- newConstArray[comp].setIConst(std::max(childConstUnions[0][arg0comp].getIConst(), childConstUnions[1][arg1comp].getIConst()));
- break;
- case EbtUint:
- newConstArray[comp].setUConst(std::max(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst()));
- break;
case EbtInt64:
newConstArray[comp].setI64Const(std::max(childConstUnions[0][arg0comp].getI64Const(), childConstUnions[1][arg1comp].getI64Const()));
break;
case EbtUint64:
newConstArray[comp].setU64Const(std::max(childConstUnions[0][arg0comp].getU64Const(), childConstUnions[1][arg1comp].getU64Const()));
break;
+#endif
default: assert(false && "Default missing");
}
break;
@@ -1147,6 +1154,11 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode)
newConstArray[comp].setDConst(std::min(std::max(childConstUnions[0][arg0comp].getDConst(), childConstUnions[1][arg1comp].getDConst()),
childConstUnions[2][arg2comp].getDConst()));
break;
+ case EbtUint:
+ newConstArray[comp].setUConst(std::min(std::max(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst()),
+ childConstUnions[2][arg2comp].getUConst()));
+ break;
+#ifndef GLSLANG_WEB
case EbtInt8:
newConstArray[comp].setI8Const(std::min(std::max(childConstUnions[0][arg0comp].getI8Const(), childConstUnions[1][arg1comp].getI8Const()),
childConstUnions[2][arg2comp].getI8Const()));
@@ -1167,10 +1179,6 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode)
newConstArray[comp].setIConst(std::min(std::max(childConstUnions[0][arg0comp].getIConst(), childConstUnions[1][arg1comp].getIConst()),
childConstUnions[2][arg2comp].getIConst()));
break;
- case EbtUint:
- newConstArray[comp].setUConst(std::min(std::max(childConstUnions[0][arg0comp].getUConst(), childConstUnions[1][arg1comp].getUConst()),
- childConstUnions[2][arg2comp].getUConst()));
- break;
case EbtInt64:
newConstArray[comp].setI64Const(std::min(std::max(childConstUnions[0][arg0comp].getI64Const(), childConstUnions[1][arg1comp].getI64Const()),
childConstUnions[2][arg2comp].getI64Const()));
@@ -1179,6 +1187,7 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode)
newConstArray[comp].setU64Const(std::min(std::max(childConstUnions[0][arg0comp].getU64Const(), childConstUnions[1][arg1comp].getU64Const()),
childConstUnions[2][arg2comp].getU64Const()));
break;
+#endif
default: assert(false && "Default missing");
}
break;
@@ -1201,12 +1210,17 @@ TIntermTyped* TIntermediate::fold(TIntermAggregate* aggrNode)
newConstArray[comp].setBConst(childConstUnions[0][arg0comp] != childConstUnions[1][arg1comp]);
break;
case EOpMix:
- if (children[2]->getAsTyped()->getBasicType() == EbtBool)
- newConstArray[comp].setDConst(childConstUnions[2][arg2comp].getBConst() ? childConstUnions[1][arg1comp].getDConst() :
- childConstUnions[0][arg0comp].getDConst());
- else
- newConstArray[comp].setDConst(childConstUnions[0][arg0comp].getDConst() * (1.0 - childConstUnions[2][arg2comp].getDConst()) +
- childConstUnions[1][arg1comp].getDConst() * childConstUnions[2][arg2comp].getDConst());
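+            // Only floating-point mix() is folded; the integer and bool variants
+            // added by newer versions return the aggregate unfolded.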
+ if (!children[0]->getAsTyped()->isFloatingDomain())
+ return aggrNode;
+ if (children[2]->getAsTyped()->getBasicType() == EbtBool) {
+ newConstArray[comp].setDConst(childConstUnions[2][arg2comp].getBConst()
+ ? childConstUnions[1][arg1comp].getDConst()
+ : childConstUnions[0][arg0comp].getDConst());
+ } else {
+ newConstArray[comp].setDConst(
+ childConstUnions[0][arg0comp].getDConst() * (1.0 - childConstUnions[2][arg2comp].getDConst()) +
+ childConstUnions[1][arg1comp].getDConst() * childConstUnions[2][arg2comp].getDConst());
+ }
break;
case EOpStep:
newConstArray[comp].setDConst(childConstUnions[1][arg1comp].getDConst() < childConstUnions[0][arg0comp].getDConst() ? 0.0 : 1.0);
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp b/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp
index 0498b4871a..de55742649 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Initialize.cpp
@@ -64,6 +64,420 @@ const bool ForwardCompatibility = false;
// Using PureOperatorBuiltins=false is deprecated.
bool PureOperatorBuiltins = true;
+namespace {
+
+//
+// A set of definitions for tabling of the built-in functions.
+//
+
+// Order matters here, as does correlation with the subsequent
+// "const int ..." declarations and the ArgType enumerants.
+const char* TypeString[] = {
+ "bool", "bvec2", "bvec3", "bvec4",
+ "float", "vec2", "vec3", "vec4",
+ "int", "ivec2", "ivec3", "ivec4",
+ "uint", "uvec2", "uvec3", "uvec4",
+};
+const int TypeStringCount = sizeof(TypeString) / sizeof(char*); // number of entries in 'TypeString'
+const int TypeStringRowShift = 2;                                 // shift amount to go down one row in 'TypeString'
+const int TypeStringColumnMask = (1 << TypeStringRowShift) - 1; // reduce type to its column number in 'TypeString'
+const int TypeStringScalarMask = ~TypeStringColumnMask; // take type to its scalar column in 'TypeString'
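+//
+// Worked example (illustration only): "ivec3" is entry 10, so its row is
+// 10 >> TypeStringRowShift == 2 (the int row, selected by TypeI below), its
+// column is 10 & TypeStringColumnMask == 2 (a 3-vector), and
+// 10 & TypeStringScalarMask == 8 recovers its scalar type, "int".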
+
+enum ArgType {
+ // numbers hardcoded to correspond to 'TypeString'; order and value matter
+ TypeB = 1 << 0, // Boolean
+ TypeF = 1 << 1, // float 32
+ TypeI = 1 << 2, // int 32
+ TypeU = 1 << 3, // uint 32
+ TypeF16 = 1 << 4, // float 16
+ TypeF64 = 1 << 5, // float 64
+ TypeI8 = 1 << 6, // int 8
+ TypeI16 = 1 << 7, // int 16
+ TypeI64 = 1 << 8, // int 64
+ TypeU8 = 1 << 9, // uint 8
+ TypeU16 = 1 << 10, // uint 16
+ TypeU64 = 1 << 11, // uint 64
+};
+// Mixtures of the above, to help the function tables
+const ArgType TypeFI = static_cast<ArgType>(TypeF | TypeI);
+const ArgType TypeFIB = static_cast<ArgType>(TypeF | TypeI | TypeB);
+const ArgType TypeIU = static_cast<ArgType>(TypeI | TypeU);
+
+// The relationships between arguments and return type, whether anything is
+// output, or other unusual situations.
+enum ArgClass {
+ ClassRegular = 0, // nothing special, just all vector widths with matching return type; traditional arithmetic
+ ClassLS = 1 << 0, // the last argument is also held fixed as a (type-matched) scalar while the others cycle
+ ClassXLS = 1 << 1, // the last argument is exclusively a (type-matched) scalar while the others cycle
+ ClassLS2 = 1 << 2, // the last two arguments are held fixed as a (type-matched) scalar while the others cycle
+ ClassFS = 1 << 3, // the first argument is held fixed as a (type-matched) scalar while the others cycle
+ ClassFS2 = 1 << 4, // the first two arguments are held fixed as a (type-matched) scalar while the others cycle
+ ClassLO = 1 << 5, // the last argument is an output
+ ClassB = 1 << 6, // return type cycles through only bool/bvec, matching vector width of args
+ ClassLB = 1 << 7, // last argument cycles through only bool/bvec, matching vector width of args
+ ClassV1 = 1 << 8, // scalar only
+ ClassFIO = 1 << 9, // first argument is inout
+ ClassRS = 1 << 10, // the return is held scalar as the arguments cycle
+ ClassNS = 1 << 11, // no scalar prototype
+ ClassCV = 1 << 12, // first argument is 'coherent volatile'
+ ClassFO = 1 << 13, // first argument is output
+ ClassV3 = 1 << 14, // vec3 only
+};
+// Mixtures of the above, to help the function tables
+const ArgClass ClassV1FIOCV = (ArgClass)(ClassV1 | ClassFIO | ClassCV);
+const ArgClass ClassV1FOCV = (ArgClass)(ClassV1 | ClassFO | ClassCV);
+const ArgClass ClassV1CV = (ArgClass)(ClassV1 | ClassCV);
+const ArgClass ClassBNS = (ArgClass)(ClassB | ClassNS);
+const ArgClass ClassRSNS = (ArgClass)(ClassRS | ClassNS);
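+//
+// For example (illustration only): a two-argument TypeF entry marked ClassLS
+// yields both the matched prototypes ("vec2 min(vec2, vec2);" etc.) and the
+// fixed-scalar prototypes ("vec2 min(vec2, float);" etc.), while ClassRS forces
+// a scalar return type, as in "float length(vec3);".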
+
+// A descriptor, for a single profile, of when something is available.
+// If the current profile does not match 'profile' mask below, the other fields
+// do not apply (nor validate).
+// profiles == EBadProfile is the end of an array of these
+struct Versioning {
+ EProfile profiles; // the profile(s) (mask) that the following fields are valid for
+ int minExtendedVersion; // earliest version when extensions are enabled; ignored if numExtensions is 0
+ int minCoreVersion; // earliest version function is in core; 0 means never
+ int numExtensions; // how many extensions are in the 'extensions' list
+ const char** extensions; // list of extension names enabling the function
+};
+
+EProfile EDesktopProfile = static_cast<EProfile>(ENoProfile | ECoreProfile | ECompatibilityProfile);
+
+// Declare pointers to put into the table for versioning.
+#ifdef GLSLANG_WEB
+ const Versioning* Es300Desktop130 = nullptr;
+ const Versioning* Es310Desktop430 = nullptr;
+#else
+ const Versioning Es300Desktop130Version[] = { { EEsProfile, 0, 300, 0, nullptr },
+ { EDesktopProfile, 0, 130, 0, nullptr },
+ { EBadProfile } };
+ const Versioning* Es300Desktop130 = &Es300Desktop130Version[0];
+
+ const Versioning Es310Desktop430Version[] = { { EEsProfile, 0, 310, 0, nullptr },
+ { EDesktopProfile, 0, 430, 0, nullptr },
+ { EBadProfile } };
+ const Versioning* Es310Desktop430 = &Es310Desktop430Version[0];
+
+ const Versioning Es310Desktop450Version[] = { { EEsProfile, 0, 310, 0, nullptr },
+ { EDesktopProfile, 0, 450, 0, nullptr },
+ { EBadProfile } };
+ const Versioning* Es310Desktop450 = &Es310Desktop450Version[0];
+#endif
+
+// The main descriptor of what a set of function prototypes can look like, and
+// a pointer to extra versioning information, when needed.
+struct BuiltInFunction {
+ TOperator op; // operator to map the name to
+ const char* name; // function name
+ int numArguments; // number of arguments (overloads with varying arguments need different entries)
+ ArgType types; // ArgType mask
+ ArgClass classes; // the ways this particular function entry manifests
+ const Versioning* versioning; // nullptr means always a valid version
+};
+
+// The tables can have the same built-in function name more than one time,
+// but the exact same prototype must be indicated at most once.
+// The prototypes that get declared are the union of all those indicated.
+// This is important when different releases add new prototypes for the same name.
+// It also allows cognitively simpler tiling of the prototype space.
+// In practice, most names can be fully represented with one entry.
+//
+// Table is terminated by an OpNull TOperator.
+
+const BuiltInFunction BaseFunctions[] = {
+// TOperator, name, arg-count, ArgType, ArgClass, versioning
+// --------- ---- --------- ------- -------- ----------
+ { EOpRadians, "radians", 1, TypeF, ClassRegular, nullptr },
+ { EOpDegrees, "degrees", 1, TypeF, ClassRegular, nullptr },
+ { EOpSin, "sin", 1, TypeF, ClassRegular, nullptr },
+ { EOpCos, "cos", 1, TypeF, ClassRegular, nullptr },
+ { EOpTan, "tan", 1, TypeF, ClassRegular, nullptr },
+ { EOpAsin, "asin", 1, TypeF, ClassRegular, nullptr },
+ { EOpAcos, "acos", 1, TypeF, ClassRegular, nullptr },
+ { EOpAtan, "atan", 2, TypeF, ClassRegular, nullptr },
+ { EOpAtan, "atan", 1, TypeF, ClassRegular, nullptr },
+ { EOpPow, "pow", 2, TypeF, ClassRegular, nullptr },
+ { EOpExp, "exp", 1, TypeF, ClassRegular, nullptr },
+ { EOpLog, "log", 1, TypeF, ClassRegular, nullptr },
+ { EOpExp2, "exp2", 1, TypeF, ClassRegular, nullptr },
+ { EOpLog2, "log2", 1, TypeF, ClassRegular, nullptr },
+ { EOpSqrt, "sqrt", 1, TypeF, ClassRegular, nullptr },
+ { EOpInverseSqrt, "inversesqrt", 1, TypeF, ClassRegular, nullptr },
+ { EOpAbs, "abs", 1, TypeF, ClassRegular, nullptr },
+ { EOpSign, "sign", 1, TypeF, ClassRegular, nullptr },
+ { EOpFloor, "floor", 1, TypeF, ClassRegular, nullptr },
+ { EOpCeil, "ceil", 1, TypeF, ClassRegular, nullptr },
+ { EOpFract, "fract", 1, TypeF, ClassRegular, nullptr },
+ { EOpMod, "mod", 2, TypeF, ClassLS, nullptr },
+ { EOpMin, "min", 2, TypeF, ClassLS, nullptr },
+ { EOpMax, "max", 2, TypeF, ClassLS, nullptr },
+ { EOpClamp, "clamp", 3, TypeF, ClassLS2, nullptr },
+ { EOpMix, "mix", 3, TypeF, ClassLS, nullptr },
+ { EOpStep, "step", 2, TypeF, ClassFS, nullptr },
+ { EOpSmoothStep, "smoothstep", 3, TypeF, ClassFS2, nullptr },
+ { EOpNormalize, "normalize", 1, TypeF, ClassRegular, nullptr },
+ { EOpFaceForward, "faceforward", 3, TypeF, ClassRegular, nullptr },
+ { EOpReflect, "reflect", 2, TypeF, ClassRegular, nullptr },
+ { EOpRefract, "refract", 3, TypeF, ClassXLS, nullptr },
+ { EOpLength, "length", 1, TypeF, ClassRS, nullptr },
+ { EOpDistance, "distance", 2, TypeF, ClassRS, nullptr },
+ { EOpDot, "dot", 2, TypeF, ClassRS, nullptr },
+ { EOpCross, "cross", 2, TypeF, ClassV3, nullptr },
+ { EOpLessThan, "lessThan", 2, TypeFI, ClassBNS, nullptr },
+ { EOpLessThanEqual, "lessThanEqual", 2, TypeFI, ClassBNS, nullptr },
+ { EOpGreaterThan, "greaterThan", 2, TypeFI, ClassBNS, nullptr },
+ { EOpGreaterThanEqual, "greaterThanEqual", 2, TypeFI, ClassBNS, nullptr },
+ { EOpVectorEqual, "equal", 2, TypeFIB, ClassBNS, nullptr },
+ { EOpVectorNotEqual, "notEqual", 2, TypeFIB, ClassBNS, nullptr },
+ { EOpAny, "any", 1, TypeB, ClassRSNS, nullptr },
+ { EOpAll, "all", 1, TypeB, ClassRSNS, nullptr },
+ { EOpVectorLogicalNot, "not", 1, TypeB, ClassNS, nullptr },
+ { EOpSinh, "sinh", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpCosh, "cosh", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpTanh, "tanh", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpAsinh, "asinh", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpAcosh, "acosh", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpAtanh, "atanh", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpAbs, "abs", 1, TypeI, ClassRegular, Es300Desktop130 },
+ { EOpSign, "sign", 1, TypeI, ClassRegular, Es300Desktop130 },
+ { EOpTrunc, "trunc", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpRound, "round", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpRoundEven, "roundEven", 1, TypeF, ClassRegular, Es300Desktop130 },
+ { EOpModf, "modf", 2, TypeF, ClassLO, Es300Desktop130 },
+ { EOpMin, "min", 2, TypeIU, ClassLS, Es300Desktop130 },
+ { EOpMax, "max", 2, TypeIU, ClassLS, Es300Desktop130 },
+ { EOpClamp, "clamp", 3, TypeIU, ClassLS2, Es300Desktop130 },
+ { EOpMix, "mix", 3, TypeF, ClassLB, Es300Desktop130 },
+ { EOpIsInf, "isinf", 1, TypeF, ClassB, Es300Desktop130 },
+ { EOpIsNan, "isnan", 1, TypeF, ClassB, Es300Desktop130 },
+ { EOpLessThan, "lessThan", 2, TypeU, ClassBNS, Es300Desktop130 },
+ { EOpLessThanEqual, "lessThanEqual", 2, TypeU, ClassBNS, Es300Desktop130 },
+ { EOpGreaterThan, "greaterThan", 2, TypeU, ClassBNS, Es300Desktop130 },
+ { EOpGreaterThanEqual, "greaterThanEqual", 2, TypeU, ClassBNS, Es300Desktop130 },
+ { EOpVectorEqual, "equal", 2, TypeU, ClassBNS, Es300Desktop130 },
+ { EOpVectorNotEqual, "notEqual", 2, TypeU, ClassBNS, Es300Desktop130 },
+ { EOpAtomicAdd, "atomicAdd", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 },
+ { EOpAtomicMin, "atomicMin", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 },
+ { EOpAtomicMax, "atomicMax", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 },
+ { EOpAtomicAnd, "atomicAnd", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 },
+ { EOpAtomicOr, "atomicOr", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 },
+ { EOpAtomicXor, "atomicXor", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 },
+ { EOpAtomicExchange, "atomicExchange", 2, TypeIU, ClassV1FIOCV, Es310Desktop430 },
+ { EOpAtomicCompSwap, "atomicCompSwap", 3, TypeIU, ClassV1FIOCV, Es310Desktop430 },
+#ifndef GLSLANG_WEB
+ { EOpMix, "mix", 3, TypeB, ClassRegular, Es310Desktop450 },
+ { EOpMix, "mix", 3, TypeIU, ClassLB, Es310Desktop450 },
+#endif
+
+ { EOpNull }
+};
+
+const BuiltInFunction DerivativeFunctions[] = {
+ { EOpDPdx, "dFdx", 1, TypeF, ClassRegular, nullptr },
+ { EOpDPdy, "dFdy", 1, TypeF, ClassRegular, nullptr },
+ { EOpFwidth, "fwidth", 1, TypeF, ClassRegular, nullptr },
+ { EOpNull }
+};
+
+// For functions declared some other way, but which still use the table to relate name to operator.
+struct CustomFunction {
+ TOperator op; // operator to map the name to
+ const char* name; // function name
+ const Versioning* versioning; // nullptr means always a valid version
+};
+
+const CustomFunction CustomFunctions[] = {
+ { EOpBarrier, "barrier", nullptr },
+ { EOpMemoryBarrierShared, "memoryBarrierShared", nullptr },
+ { EOpGroupMemoryBarrier, "groupMemoryBarrier", nullptr },
+ { EOpMemoryBarrier, "memoryBarrier", nullptr },
+ { EOpMemoryBarrierBuffer, "memoryBarrierBuffer", nullptr },
+
+ { EOpPackSnorm2x16, "packSnorm2x16", nullptr },
+ { EOpUnpackSnorm2x16, "unpackSnorm2x16", nullptr },
+ { EOpPackUnorm2x16, "packUnorm2x16", nullptr },
+ { EOpUnpackUnorm2x16, "unpackUnorm2x16", nullptr },
+ { EOpPackHalf2x16, "packHalf2x16", nullptr },
+ { EOpUnpackHalf2x16, "unpackHalf2x16", nullptr },
+
+ { EOpMul, "matrixCompMult", nullptr },
+ { EOpOuterProduct, "outerProduct", nullptr },
+ { EOpTranspose, "transpose", nullptr },
+ { EOpDeterminant, "determinant", nullptr },
+ { EOpMatrixInverse, "inverse", nullptr },
+ { EOpFloatBitsToInt, "floatBitsToInt", nullptr },
+ { EOpFloatBitsToUint, "floatBitsToUint", nullptr },
+ { EOpIntBitsToFloat, "intBitsToFloat", nullptr },
+ { EOpUintBitsToFloat, "uintBitsToFloat", nullptr },
+
+ { EOpTextureQuerySize, "textureSize", nullptr },
+ { EOpTextureQueryLod, "textureQueryLod", nullptr },
+ { EOpTextureQueryLevels, "textureQueryLevels", nullptr },
+ { EOpTextureQuerySamples, "textureSamples", nullptr },
+ { EOpTexture, "texture", nullptr },
+ { EOpTextureProj, "textureProj", nullptr },
+ { EOpTextureLod, "textureLod", nullptr },
+ { EOpTextureOffset, "textureOffset", nullptr },
+ { EOpTextureFetch, "texelFetch", nullptr },
+ { EOpTextureFetchOffset, "texelFetchOffset", nullptr },
+ { EOpTextureProjOffset, "textureProjOffset", nullptr },
+ { EOpTextureLodOffset, "textureLodOffset", nullptr },
+ { EOpTextureProjLod, "textureProjLod", nullptr },
+ { EOpTextureProjLodOffset, "textureProjLodOffset", nullptr },
+ { EOpTextureGrad, "textureGrad", nullptr },
+ { EOpTextureGradOffset, "textureGradOffset", nullptr },
+ { EOpTextureProjGrad, "textureProjGrad", nullptr },
+ { EOpTextureProjGradOffset, "textureProjGradOffset", nullptr },
+
+ { EOpNull }
+};
+
+// For the given table of functions, add all the indicated prototypes for each
+// one, to be returned in the passed-in 'decls'.
+void AddTabledBuiltin(TString& decls, const BuiltInFunction& function)
+{
+ const auto isScalarType = [](int type) { return (type & TypeStringColumnMask) == 0; };
+
+ // loop across these two:
+ // 0: the varying arg set, and
+ // 1: the fixed scalar args
+ const ArgClass ClassFixed = (ArgClass)(ClassLS | ClassXLS | ClassLS2 | ClassFS | ClassFS2);
+ for (int fixed = 0; fixed < ((function.classes & ClassFixed) > 0 ? 2 : 1); ++fixed) {
+
+ if (fixed == 0 && (function.classes & ClassXLS))
+ continue;
+
+ // walk the type strings in TypeString[]
+ for (int type = 0; type < TypeStringCount; ++type) {
+ // skip types not selected: go from type to row number to type bit
+ if ((function.types & (1 << (type >> TypeStringRowShift))) == 0)
+ continue;
+
+ // if we aren't on a scalar, and should be, skip
+ if ((function.classes & ClassV1) && !isScalarType(type))
+ continue;
+
+ // if we aren't on a 3-vector, and should be, skip
+ if ((function.classes & ClassV3) && (type & TypeStringColumnMask) != 2)
+ continue;
+
+ // skip replication of all arg scalars between the varying arg set and the fixed args
+ if (fixed == 1 && type == (type & TypeStringScalarMask) && (function.classes & ClassXLS) == 0)
+ continue;
+
+ // skip scalars when we are told to
+ if ((function.classes & ClassNS) && isScalarType(type))
+ continue;
+
+ // return type
+ if (function.classes & ClassB)
+ decls.append(TypeString[type & TypeStringColumnMask]);
+ else if (function.classes & ClassRS)
+ decls.append(TypeString[type & TypeStringScalarMask]);
+ else
+ decls.append(TypeString[type]);
+ decls.append(" ");
+ decls.append(function.name);
+ decls.append("(");
+
+ // arguments
+ for (int arg = 0; arg < function.numArguments; ++arg) {
+ if (arg == function.numArguments - 1 && (function.classes & ClassLO))
+ decls.append("out ");
+ if (arg == 0) {
+#ifndef GLSLANG_WEB
+ if (function.classes & ClassCV)
+ decls.append("coherent volatile ");
+#endif
+ if (function.classes & ClassFIO)
+ decls.append("inout ");
+ if (function.classes & ClassFO)
+ decls.append("out ");
+ }
+ if ((function.classes & ClassLB) && arg == function.numArguments - 1)
+ decls.append(TypeString[type & TypeStringColumnMask]);
+ else if (fixed && ((arg == function.numArguments - 1 && (function.classes & (ClassLS | ClassXLS |
+ ClassLS2))) ||
+ (arg == function.numArguments - 2 && (function.classes & ClassLS2)) ||
+ (arg == 0 && (function.classes & (ClassFS | ClassFS2))) ||
+ (arg == 1 && (function.classes & ClassFS2))))
+ decls.append(TypeString[type & TypeStringScalarMask]);
+ else
+ decls.append(TypeString[type]);
+ if (arg < function.numArguments - 1)
+ decls.append(",");
+ }
+ decls.append(");\n");
+ }
+ }
+}
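+
+// As a concrete illustration, the entry { EOpClamp, "clamp", 3, TypeF, ClassLS2 }
+// expands to the union of the all-vector prototypes
+//     float clamp(float, float, float);  vec2 clamp(vec2, vec2, vec2);  ...
+// and, because ClassLS2 holds the last two arguments as scalars, the fixed set
+//     vec2 clamp(vec2, float, float);  vec3 clamp(vec3, float, float);  ...
+// (the all-scalar form is not emitted a second time by the fixed pass).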
+
+// See if the tabled versioning information allows the current version.
+bool ValidVersion(const BuiltInFunction& function, int version, EProfile profile, const SpvVersion& /* spVersion */)
+{
+#ifdef GLSLANG_WEB
+ // all entries in table are valid
+ return true;
+#endif
+
+ // nullptr means always valid
+ if (function.versioning == nullptr)
+ return true;
+
+ // check for what is said about our current profile
+ for (const Versioning* v = function.versioning; v->profiles != EBadProfile; ++v) {
+ if ((v->profiles & profile) != 0) {
+ if (v->minCoreVersion <= version || (v->numExtensions > 0 && v->minExtendedVersion <= version))
+ return true;
+ }
+ }
+
+ return false;
+}
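+
+// For instance, an entry versioned with Es300Desktop130 is accepted for
+// (EEsProfile, 300) or (EDesktopProfile, 130) and later, and rejected for
+// anything earlier, since it lists no enabling extensions.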
+
+// Relate a single table of built-ins to their AST operator.
+// This can get called redundantly (especially for the common built-ins, when
+// called once per stage). This is a performance issue only, not a correctness
+// concern. It is done for quality arising from simplicity, as there are subtleties
+// to get correct if instead trying to do it surgically.
+template<class FunctionT>
+void RelateTabledBuiltins(const FunctionT* functions, TSymbolTable& symbolTable)
+{
+ while (functions->op != EOpNull) {
+ symbolTable.relateToOperator(functions->name, functions->op);
+ ++functions;
+ }
+}
+
+} // end anonymous namespace
+
+// Add declarations for all tables of built-in functions.
+void TBuiltIns::addTabledBuiltins(int version, EProfile profile, const SpvVersion& spvVersion)
+{
+ const auto forEachFunction = [&](TString& decls, const BuiltInFunction* function) {
+ while (function->op != EOpNull) {
+ if (ValidVersion(*function, version, profile, spvVersion))
+ AddTabledBuiltin(decls, *function);
+ ++function;
+ }
+ };
+
+ forEachFunction(commonBuiltins, BaseFunctions);
+ forEachFunction(stageBuiltins[EShLangFragment], DerivativeFunctions);
+
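+    // Derivative built-ins are also made available to compute shaders on newer
+    // versions (ES 3.2 / desktop 4.5), presumably for compute-derivative extensions.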
+ if ((profile == EEsProfile && version >= 320) || (profile != EEsProfile && version >= 450))
+ forEachFunction(stageBuiltins[EShLangCompute], DerivativeFunctions);
+}
+
+// Relate all tables of built-ins to the AST operators.
+void TBuiltIns::relateTabledBuiltins(int /* version */, EProfile /* profile */, const SpvVersion& /* spvVersion */, EShLanguage /* stage */, TSymbolTable& symbolTable)
+{
+ RelateTabledBuiltins(BaseFunctions, symbolTable);
+ RelateTabledBuiltins(DerivativeFunctions, symbolTable);
+ RelateTabledBuiltins(CustomFunctions, symbolTable);
+}
+
inline bool IncludeLegacy(int version, EProfile profile, const SpvVersion& spvVersion)
{
return profile != EEsProfile && (version <= 130 || (spvVersion.spv == 0 && ARBCompatibility) || profile == ECompatibilityProfile);
@@ -84,27 +498,30 @@ TBuiltIns::TBuiltIns()
// Set up textual representations for making all the permutations
// of texturing/imaging functions.
prefixes[EbtFloat] = "";
-#ifdef AMD_EXTENSIONS
+ prefixes[EbtInt] = "i";
+ prefixes[EbtUint] = "u";
+#ifndef GLSLANG_WEB
prefixes[EbtFloat16] = "f16";
-#endif
prefixes[EbtInt8] = "i8";
prefixes[EbtUint8] = "u8";
prefixes[EbtInt16] = "i16";
prefixes[EbtUint16] = "u16";
- prefixes[EbtInt] = "i";
- prefixes[EbtUint] = "u";
+#endif
+
postfixes[2] = "2";
postfixes[3] = "3";
postfixes[4] = "4";
// Map from symbolic class of texturing dimension to numeric dimensions.
- dimMap[Esd1D] = 1;
dimMap[Esd2D] = 2;
- dimMap[EsdRect] = 2;
dimMap[Esd3D] = 3;
dimMap[EsdCube] = 3;
+#ifndef GLSLANG_WEB
+ dimMap[Esd1D] = 1;
+ dimMap[EsdRect] = 2;
dimMap[EsdBuffer] = 1;
- dimMap[EsdSubpass] = 2; // potientially unused for now
+ dimMap[EsdSubpass] = 2; // potentially unused for now
+#endif
}
TBuiltIns::~TBuiltIns()
@@ -122,32 +539,22 @@ TBuiltIns::~TBuiltIns()
//
void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvVersion)
{
+#ifdef GLSLANG_WEB
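+    // the web build compiles against a single fixed configuration: ES 3.10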
+ version = 310;
+ profile = EEsProfile;
+#endif
+ addTabledBuiltins(version, profile, spvVersion);
+
//============================================================================
//
// Prototypes for built-in functions used repeatedly by different shaders
//
//============================================================================
+#ifndef GLSLANG_WEB
//
// Derivatives Functions.
//
- TString derivatives (
- "float dFdx(float p);"
- "vec2 dFdx(vec2 p);"
- "vec3 dFdx(vec3 p);"
- "vec4 dFdx(vec4 p);"
-
- "float dFdy(float p);"
- "vec2 dFdy(vec2 p);"
- "vec3 dFdy(vec3 p);"
- "vec4 dFdy(vec4 p);"
-
- "float fwidth(float p);"
- "vec2 fwidth(vec2 p);"
- "vec3 fwidth(vec3 p);"
- "vec4 fwidth(vec4 p);"
- );
-
TString derivativeControls (
"float dFdxFine(float p);"
"vec2 dFdxFine(vec2 p);"
@@ -281,321 +688,9 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
//============================================================================
//
- // Angle and Trigonometric Functions.
- //
- commonBuiltins.append(
- "float radians(float degrees);"
- "vec2 radians(vec2 degrees);"
- "vec3 radians(vec3 degrees);"
- "vec4 radians(vec4 degrees);"
-
- "float degrees(float radians);"
- "vec2 degrees(vec2 radians);"
- "vec3 degrees(vec3 radians);"
- "vec4 degrees(vec4 radians);"
-
- "float sin(float angle);"
- "vec2 sin(vec2 angle);"
- "vec3 sin(vec3 angle);"
- "vec4 sin(vec4 angle);"
-
- "float cos(float angle);"
- "vec2 cos(vec2 angle);"
- "vec3 cos(vec3 angle);"
- "vec4 cos(vec4 angle);"
-
- "float tan(float angle);"
- "vec2 tan(vec2 angle);"
- "vec3 tan(vec3 angle);"
- "vec4 tan(vec4 angle);"
-
- "float asin(float x);"
- "vec2 asin(vec2 x);"
- "vec3 asin(vec3 x);"
- "vec4 asin(vec4 x);"
-
- "float acos(float x);"
- "vec2 acos(vec2 x);"
- "vec3 acos(vec3 x);"
- "vec4 acos(vec4 x);"
-
- "float atan(float y, float x);"
- "vec2 atan(vec2 y, vec2 x);"
- "vec3 atan(vec3 y, vec3 x);"
- "vec4 atan(vec4 y, vec4 x);"
-
- "float atan(float y_over_x);"
- "vec2 atan(vec2 y_over_x);"
- "vec3 atan(vec3 y_over_x);"
- "vec4 atan(vec4 y_over_x);"
-
- "\n");
-
- if (version >= 130) {
- commonBuiltins.append(
- "float sinh(float angle);"
- "vec2 sinh(vec2 angle);"
- "vec3 sinh(vec3 angle);"
- "vec4 sinh(vec4 angle);"
-
- "float cosh(float angle);"
- "vec2 cosh(vec2 angle);"
- "vec3 cosh(vec3 angle);"
- "vec4 cosh(vec4 angle);"
-
- "float tanh(float angle);"
- "vec2 tanh(vec2 angle);"
- "vec3 tanh(vec3 angle);"
- "vec4 tanh(vec4 angle);"
-
- "float asinh(float x);"
- "vec2 asinh(vec2 x);"
- "vec3 asinh(vec3 x);"
- "vec4 asinh(vec4 x);"
-
- "float acosh(float x);"
- "vec2 acosh(vec2 x);"
- "vec3 acosh(vec3 x);"
- "vec4 acosh(vec4 x);"
-
- "float atanh(float y_over_x);"
- "vec2 atanh(vec2 y_over_x);"
- "vec3 atanh(vec3 y_over_x);"
- "vec4 atanh(vec4 y_over_x);"
-
- "\n");
- }
-
- //
- // Exponential Functions.
- //
- commonBuiltins.append(
- "float pow(float x, float y);"
- "vec2 pow(vec2 x, vec2 y);"
- "vec3 pow(vec3 x, vec3 y);"
- "vec4 pow(vec4 x, vec4 y);"
-
- "float exp(float x);"
- "vec2 exp(vec2 x);"
- "vec3 exp(vec3 x);"
- "vec4 exp(vec4 x);"
-
- "float log(float x);"
- "vec2 log(vec2 x);"
- "vec3 log(vec3 x);"
- "vec4 log(vec4 x);"
-
- "float exp2(float x);"
- "vec2 exp2(vec2 x);"
- "vec3 exp2(vec3 x);"
- "vec4 exp2(vec4 x);"
-
- "float log2(float x);"
- "vec2 log2(vec2 x);"
- "vec3 log2(vec3 x);"
- "vec4 log2(vec4 x);"
-
- "float sqrt(float x);"
- "vec2 sqrt(vec2 x);"
- "vec3 sqrt(vec3 x);"
- "vec4 sqrt(vec4 x);"
-
- "float inversesqrt(float x);"
- "vec2 inversesqrt(vec2 x);"
- "vec3 inversesqrt(vec3 x);"
- "vec4 inversesqrt(vec4 x);"
-
- "\n");
-
- //
- // Common Functions.
- //
- commonBuiltins.append(
- "float abs(float x);"
- "vec2 abs(vec2 x);"
- "vec3 abs(vec3 x);"
- "vec4 abs(vec4 x);"
-
- "float sign(float x);"
- "vec2 sign(vec2 x);"
- "vec3 sign(vec3 x);"
- "vec4 sign(vec4 x);"
-
- "float floor(float x);"
- "vec2 floor(vec2 x);"
- "vec3 floor(vec3 x);"
- "vec4 floor(vec4 x);"
-
- "float ceil(float x);"
- "vec2 ceil(vec2 x);"
- "vec3 ceil(vec3 x);"
- "vec4 ceil(vec4 x);"
-
- "float fract(float x);"
- "vec2 fract(vec2 x);"
- "vec3 fract(vec3 x);"
- "vec4 fract(vec4 x);"
-
- "float mod(float x, float y);"
- "vec2 mod(vec2 x, float y);"
- "vec3 mod(vec3 x, float y);"
- "vec4 mod(vec4 x, float y);"
- "vec2 mod(vec2 x, vec2 y);"
- "vec3 mod(vec3 x, vec3 y);"
- "vec4 mod(vec4 x, vec4 y);"
-
- "float min(float x, float y);"
- "vec2 min(vec2 x, float y);"
- "vec3 min(vec3 x, float y);"
- "vec4 min(vec4 x, float y);"
- "vec2 min(vec2 x, vec2 y);"
- "vec3 min(vec3 x, vec3 y);"
- "vec4 min(vec4 x, vec4 y);"
-
- "float max(float x, float y);"
- "vec2 max(vec2 x, float y);"
- "vec3 max(vec3 x, float y);"
- "vec4 max(vec4 x, float y);"
- "vec2 max(vec2 x, vec2 y);"
- "vec3 max(vec3 x, vec3 y);"
- "vec4 max(vec4 x, vec4 y);"
-
- "float clamp(float x, float minVal, float maxVal);"
- "vec2 clamp(vec2 x, float minVal, float maxVal);"
- "vec3 clamp(vec3 x, float minVal, float maxVal);"
- "vec4 clamp(vec4 x, float minVal, float maxVal);"
- "vec2 clamp(vec2 x, vec2 minVal, vec2 maxVal);"
- "vec3 clamp(vec3 x, vec3 minVal, vec3 maxVal);"
- "vec4 clamp(vec4 x, vec4 minVal, vec4 maxVal);"
-
- "float mix(float x, float y, float a);"
- "vec2 mix(vec2 x, vec2 y, float a);"
- "vec3 mix(vec3 x, vec3 y, float a);"
- "vec4 mix(vec4 x, vec4 y, float a);"
- "vec2 mix(vec2 x, vec2 y, vec2 a);"
- "vec3 mix(vec3 x, vec3 y, vec3 a);"
- "vec4 mix(vec4 x, vec4 y, vec4 a);"
-
- "float step(float edge, float x);"
- "vec2 step(vec2 edge, vec2 x);"
- "vec3 step(vec3 edge, vec3 x);"
- "vec4 step(vec4 edge, vec4 x);"
- "vec2 step(float edge, vec2 x);"
- "vec3 step(float edge, vec3 x);"
- "vec4 step(float edge, vec4 x);"
-
- "float smoothstep(float edge0, float edge1, float x);"
- "vec2 smoothstep(vec2 edge0, vec2 edge1, vec2 x);"
- "vec3 smoothstep(vec3 edge0, vec3 edge1, vec3 x);"
- "vec4 smoothstep(vec4 edge0, vec4 edge1, vec4 x);"
- "vec2 smoothstep(float edge0, float edge1, vec2 x);"
- "vec3 smoothstep(float edge0, float edge1, vec3 x);"
- "vec4 smoothstep(float edge0, float edge1, vec4 x);"
-
- "\n");
-
- if (version >= 130) {
- commonBuiltins.append(
- " int abs( int x);"
- "ivec2 abs(ivec2 x);"
- "ivec3 abs(ivec3 x);"
- "ivec4 abs(ivec4 x);"
-
- " int sign( int x);"
- "ivec2 sign(ivec2 x);"
- "ivec3 sign(ivec3 x);"
- "ivec4 sign(ivec4 x);"
-
- "float trunc(float x);"
- "vec2 trunc(vec2 x);"
- "vec3 trunc(vec3 x);"
- "vec4 trunc(vec4 x);"
-
- "float round(float x);"
- "vec2 round(vec2 x);"
- "vec3 round(vec3 x);"
- "vec4 round(vec4 x);"
-
- "float roundEven(float x);"
- "vec2 roundEven(vec2 x);"
- "vec3 roundEven(vec3 x);"
- "vec4 roundEven(vec4 x);"
-
- "float modf(float, out float);"
- "vec2 modf(vec2, out vec2 );"
- "vec3 modf(vec3, out vec3 );"
- "vec4 modf(vec4, out vec4 );"
-
- " int min(int x, int y);"
- "ivec2 min(ivec2 x, int y);"
- "ivec3 min(ivec3 x, int y);"
- "ivec4 min(ivec4 x, int y);"
- "ivec2 min(ivec2 x, ivec2 y);"
- "ivec3 min(ivec3 x, ivec3 y);"
- "ivec4 min(ivec4 x, ivec4 y);"
-
- " uint min(uint x, uint y);"
- "uvec2 min(uvec2 x, uint y);"
- "uvec3 min(uvec3 x, uint y);"
- "uvec4 min(uvec4 x, uint y);"
- "uvec2 min(uvec2 x, uvec2 y);"
- "uvec3 min(uvec3 x, uvec3 y);"
- "uvec4 min(uvec4 x, uvec4 y);"
-
- " int max(int x, int y);"
- "ivec2 max(ivec2 x, int y);"
- "ivec3 max(ivec3 x, int y);"
- "ivec4 max(ivec4 x, int y);"
- "ivec2 max(ivec2 x, ivec2 y);"
- "ivec3 max(ivec3 x, ivec3 y);"
- "ivec4 max(ivec4 x, ivec4 y);"
-
- " uint max(uint x, uint y);"
- "uvec2 max(uvec2 x, uint y);"
- "uvec3 max(uvec3 x, uint y);"
- "uvec4 max(uvec4 x, uint y);"
- "uvec2 max(uvec2 x, uvec2 y);"
- "uvec3 max(uvec3 x, uvec3 y);"
- "uvec4 max(uvec4 x, uvec4 y);"
-
- "int clamp(int x, int minVal, int maxVal);"
- "ivec2 clamp(ivec2 x, int minVal, int maxVal);"
- "ivec3 clamp(ivec3 x, int minVal, int maxVal);"
- "ivec4 clamp(ivec4 x, int minVal, int maxVal);"
- "ivec2 clamp(ivec2 x, ivec2 minVal, ivec2 maxVal);"
- "ivec3 clamp(ivec3 x, ivec3 minVal, ivec3 maxVal);"
- "ivec4 clamp(ivec4 x, ivec4 minVal, ivec4 maxVal);"
-
- "uint clamp(uint x, uint minVal, uint maxVal);"
- "uvec2 clamp(uvec2 x, uint minVal, uint maxVal);"
- "uvec3 clamp(uvec3 x, uint minVal, uint maxVal);"
- "uvec4 clamp(uvec4 x, uint minVal, uint maxVal);"
- "uvec2 clamp(uvec2 x, uvec2 minVal, uvec2 maxVal);"
- "uvec3 clamp(uvec3 x, uvec3 minVal, uvec3 maxVal);"
- "uvec4 clamp(uvec4 x, uvec4 minVal, uvec4 maxVal);"
-
- "float mix(float x, float y, bool a);"
- "vec2 mix(vec2 x, vec2 y, bvec2 a);"
- "vec3 mix(vec3 x, vec3 y, bvec3 a);"
- "vec4 mix(vec4 x, vec4 y, bvec4 a);"
-
- "bool isnan(float x);"
- "bvec2 isnan(vec2 x);"
- "bvec3 isnan(vec3 x);"
- "bvec4 isnan(vec4 x);"
-
- "bool isinf(float x);"
- "bvec2 isinf(vec2 x);"
- "bvec3 isinf(vec3 x);"
- "bvec4 isinf(vec4 x);"
-
- "\n");
- }
-
- //
// double functions added to desktop 4.00, but not fma, frexp, ldexp, or pack/unpack
//
- if (profile != EEsProfile && version >= 400) {
+ if (profile != EEsProfile && version >= 150) { // ARB_gpu_shader_fp64
commonBuiltins.append(
"double sqrt(double);"
@@ -959,31 +1054,30 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"bvec3 notEqual(u64vec3, u64vec3);"
"bvec4 notEqual(u64vec4, u64vec4);"
- "int findLSB(int64_t);"
- "ivec2 findLSB(i64vec2);"
- "ivec3 findLSB(i64vec3);"
- "ivec4 findLSB(i64vec4);"
+ "int64_t findLSB(int64_t);"
+ "i64vec2 findLSB(i64vec2);"
+ "i64vec3 findLSB(i64vec3);"
+ "i64vec4 findLSB(i64vec4);"
- "int findLSB(uint64_t);"
- "ivec2 findLSB(u64vec2);"
- "ivec3 findLSB(u64vec3);"
- "ivec4 findLSB(u64vec4);"
+ "int64_t findLSB(uint64_t);"
+ "i64vec2 findLSB(u64vec2);"
+ "i64vec3 findLSB(u64vec3);"
+ "i64vec4 findLSB(u64vec4);"
- "int findMSB(int64_t);"
- "ivec2 findMSB(i64vec2);"
- "ivec3 findMSB(i64vec3);"
- "ivec4 findMSB(i64vec4);"
+ "int64_t findMSB(int64_t);"
+ "i64vec2 findMSB(i64vec2);"
+ "i64vec3 findMSB(i64vec3);"
+ "i64vec4 findMSB(i64vec4);"
- "int findMSB(uint64_t);"
- "ivec2 findMSB(u64vec2);"
- "ivec3 findMSB(u64vec3);"
- "ivec4 findMSB(u64vec4);"
+ "int64_t findMSB(uint64_t);"
+ "i64vec2 findMSB(u64vec2);"
+ "i64vec3 findMSB(u64vec3);"
+ "i64vec4 findMSB(u64vec4);"
"\n"
);
}
-#ifdef AMD_EXTENSIONS
// GL_AMD_shader_trinary_minmax
if (profile != EEsProfile && version >= 430) {
commonBuiltins.append(
@@ -1080,48 +1174,31 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n"
);
}
-#endif
if ((profile == EEsProfile && version >= 310) ||
(profile != EEsProfile && version >= 430)) {
commonBuiltins.append(
- "uint atomicAdd(coherent volatile inout uint, uint);"
- " int atomicAdd(coherent volatile inout int, int);"
"uint atomicAdd(coherent volatile inout uint, uint, int, int, int);"
" int atomicAdd(coherent volatile inout int, int, int, int, int);"
- "uint atomicMin(coherent volatile inout uint, uint);"
- " int atomicMin(coherent volatile inout int, int);"
"uint atomicMin(coherent volatile inout uint, uint, int, int, int);"
" int atomicMin(coherent volatile inout int, int, int, int, int);"
- "uint atomicMax(coherent volatile inout uint, uint);"
- " int atomicMax(coherent volatile inout int, int);"
"uint atomicMax(coherent volatile inout uint, uint, int, int, int);"
" int atomicMax(coherent volatile inout int, int, int, int, int);"
- "uint atomicAnd(coherent volatile inout uint, uint);"
- " int atomicAnd(coherent volatile inout int, int);"
"uint atomicAnd(coherent volatile inout uint, uint, int, int, int);"
" int atomicAnd(coherent volatile inout int, int, int, int, int);"
- "uint atomicOr (coherent volatile inout uint, uint);"
- " int atomicOr (coherent volatile inout int, int);"
"uint atomicOr (coherent volatile inout uint, uint, int, int, int);"
" int atomicOr (coherent volatile inout int, int, int, int, int);"
- "uint atomicXor(coherent volatile inout uint, uint);"
- " int atomicXor(coherent volatile inout int, int);"
"uint atomicXor(coherent volatile inout uint, uint, int, int, int);"
" int atomicXor(coherent volatile inout int, int, int, int, int);"
- "uint atomicExchange(coherent volatile inout uint, uint);"
- " int atomicExchange(coherent volatile inout int, int);"
"uint atomicExchange(coherent volatile inout uint, uint, int, int, int);"
" int atomicExchange(coherent volatile inout int, int, int, int, int);"
- "uint atomicCompSwap(coherent volatile inout uint, uint, uint);"
- " int atomicCompSwap(coherent volatile inout int, int, int);"
"uint atomicCompSwap(coherent volatile inout uint, uint, uint, int, int, int, int, int);"
" int atomicCompSwap(coherent volatile inout int, int, int, int, int, int, int, int);"
@@ -1183,27 +1260,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"void atomicStore(coherent volatile out int64_t, int64_t, int, int, int);"
"\n");
}
-
- if ((profile == EEsProfile && version >= 310) ||
- (profile != EEsProfile && version >= 450)) {
- commonBuiltins.append(
- "int mix(int x, int y, bool a);"
- "ivec2 mix(ivec2 x, ivec2 y, bvec2 a);"
- "ivec3 mix(ivec3 x, ivec3 y, bvec3 a);"
- "ivec4 mix(ivec4 x, ivec4 y, bvec4 a);"
-
- "uint mix(uint x, uint y, bool a);"
- "uvec2 mix(uvec2 x, uvec2 y, bvec2 a);"
- "uvec3 mix(uvec3 x, uvec3 y, bvec3 a);"
- "uvec4 mix(uvec4 x, uvec4 y, bvec4 a);"
-
- "bool mix(bool x, bool y, bool a);"
- "bvec2 mix(bvec2 x, bvec2 y, bvec2 a);"
- "bvec3 mix(bvec3 x, bvec3 y, bvec3 a);"
- "bvec4 mix(bvec4 x, bvec4 y, bvec4 a);"
-
- "\n");
- }
+#endif
if ((profile == EEsProfile && version >= 300) ||
(profile != EEsProfile && version >= 330)) {
@@ -1231,6 +1288,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
+#ifndef GLSLANG_WEB
if ((profile != EEsProfile && version >= 400) ||
(profile == EEsProfile && version >= 310)) { // GL_OES_gpu_shader5
@@ -1240,15 +1298,15 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"vec3 fma(vec3, vec3, vec3 );"
"vec4 fma(vec4, vec4, vec4 );"
"\n");
+ }
- if (profile != EEsProfile) {
+ if (profile != EEsProfile && version >= 150) { // ARB_gpu_shader_fp64
commonBuiltins.append(
"double fma(double, double, double);"
"dvec2 fma(dvec2, dvec2, dvec2 );"
"dvec3 fma(dvec3, dvec3, dvec3 );"
"dvec4 fma(dvec4, dvec4, dvec4 );"
"\n");
- }
}
if ((profile == EEsProfile && version >= 310) ||
@@ -1267,7 +1325,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
- if (profile != EEsProfile && version >= 400) {
+ if (profile != EEsProfile && version >= 150) { // ARB_gpu_shader_fp64
commonBuiltins.append(
"double frexp(double, out int);"
"dvec2 frexp( dvec2, out ivec2);"
@@ -1284,6 +1342,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
+#endif
if ((profile == EEsProfile && version >= 300) ||
(profile != EEsProfile && version >= 400)) {
@@ -1312,6 +1371,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
+#ifndef GLSLANG_WEB
if ((profile == EEsProfile && version >= 310) ||
(profile != EEsProfile && version >= 400)) {
commonBuiltins.append(
@@ -1331,48 +1391,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"vec4 unpackUnorm4x8(highp uint);"
"\n");
}
-
- //
- // Geometric Functions.
- //
- commonBuiltins.append(
- "float length(float x);"
- "float length(vec2 x);"
- "float length(vec3 x);"
- "float length(vec4 x);"
-
- "float distance(float p0, float p1);"
- "float distance(vec2 p0, vec2 p1);"
- "float distance(vec3 p0, vec3 p1);"
- "float distance(vec4 p0, vec4 p1);"
-
- "float dot(float x, float y);"
- "float dot(vec2 x, vec2 y);"
- "float dot(vec3 x, vec3 y);"
- "float dot(vec4 x, vec4 y);"
-
- "vec3 cross(vec3 x, vec3 y);"
- "float normalize(float x);"
- "vec2 normalize(vec2 x);"
- "vec3 normalize(vec3 x);"
- "vec4 normalize(vec4 x);"
-
- "float faceforward(float N, float I, float Nref);"
- "vec2 faceforward(vec2 N, vec2 I, vec2 Nref);"
- "vec3 faceforward(vec3 N, vec3 I, vec3 Nref);"
- "vec4 faceforward(vec4 N, vec4 I, vec4 Nref);"
-
- "float reflect(float I, float N);"
- "vec2 reflect(vec2 I, vec2 N);"
- "vec3 reflect(vec3 I, vec3 N);"
- "vec4 reflect(vec4 I, vec4 N);"
-
- "float refract(float I, float N, float eta);"
- "vec2 refract(vec2 I, vec2 N, float eta);"
- "vec3 refract(vec3 I, vec3 N, float eta);"
- "vec4 refract(vec4 I, vec4 N, float eta);"
-
- "\n");
+#endif
//
// Matrix Functions.
@@ -1431,109 +1450,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
}
}
- //
- // Vector relational functions.
- //
- commonBuiltins.append(
- "bvec2 lessThan(vec2 x, vec2 y);"
- "bvec3 lessThan(vec3 x, vec3 y);"
- "bvec4 lessThan(vec4 x, vec4 y);"
-
- "bvec2 lessThan(ivec2 x, ivec2 y);"
- "bvec3 lessThan(ivec3 x, ivec3 y);"
- "bvec4 lessThan(ivec4 x, ivec4 y);"
-
- "bvec2 lessThanEqual(vec2 x, vec2 y);"
- "bvec3 lessThanEqual(vec3 x, vec3 y);"
- "bvec4 lessThanEqual(vec4 x, vec4 y);"
-
- "bvec2 lessThanEqual(ivec2 x, ivec2 y);"
- "bvec3 lessThanEqual(ivec3 x, ivec3 y);"
- "bvec4 lessThanEqual(ivec4 x, ivec4 y);"
-
- "bvec2 greaterThan(vec2 x, vec2 y);"
- "bvec3 greaterThan(vec3 x, vec3 y);"
- "bvec4 greaterThan(vec4 x, vec4 y);"
-
- "bvec2 greaterThan(ivec2 x, ivec2 y);"
- "bvec3 greaterThan(ivec3 x, ivec3 y);"
- "bvec4 greaterThan(ivec4 x, ivec4 y);"
-
- "bvec2 greaterThanEqual(vec2 x, vec2 y);"
- "bvec3 greaterThanEqual(vec3 x, vec3 y);"
- "bvec4 greaterThanEqual(vec4 x, vec4 y);"
-
- "bvec2 greaterThanEqual(ivec2 x, ivec2 y);"
- "bvec3 greaterThanEqual(ivec3 x, ivec3 y);"
- "bvec4 greaterThanEqual(ivec4 x, ivec4 y);"
-
- "bvec2 equal(vec2 x, vec2 y);"
- "bvec3 equal(vec3 x, vec3 y);"
- "bvec4 equal(vec4 x, vec4 y);"
-
- "bvec2 equal(ivec2 x, ivec2 y);"
- "bvec3 equal(ivec3 x, ivec3 y);"
- "bvec4 equal(ivec4 x, ivec4 y);"
-
- "bvec2 equal(bvec2 x, bvec2 y);"
- "bvec3 equal(bvec3 x, bvec3 y);"
- "bvec4 equal(bvec4 x, bvec4 y);"
-
- "bvec2 notEqual(vec2 x, vec2 y);"
- "bvec3 notEqual(vec3 x, vec3 y);"
- "bvec4 notEqual(vec4 x, vec4 y);"
-
- "bvec2 notEqual(ivec2 x, ivec2 y);"
- "bvec3 notEqual(ivec3 x, ivec3 y);"
- "bvec4 notEqual(ivec4 x, ivec4 y);"
-
- "bvec2 notEqual(bvec2 x, bvec2 y);"
- "bvec3 notEqual(bvec3 x, bvec3 y);"
- "bvec4 notEqual(bvec4 x, bvec4 y);"
-
- "bool any(bvec2 x);"
- "bool any(bvec3 x);"
- "bool any(bvec4 x);"
-
- "bool all(bvec2 x);"
- "bool all(bvec3 x);"
- "bool all(bvec4 x);"
-
- "bvec2 not(bvec2 x);"
- "bvec3 not(bvec3 x);"
- "bvec4 not(bvec4 x);"
-
- "\n");
-
- if (version >= 130) {
- commonBuiltins.append(
- "bvec2 lessThan(uvec2 x, uvec2 y);"
- "bvec3 lessThan(uvec3 x, uvec3 y);"
- "bvec4 lessThan(uvec4 x, uvec4 y);"
-
- "bvec2 lessThanEqual(uvec2 x, uvec2 y);"
- "bvec3 lessThanEqual(uvec3 x, uvec3 y);"
- "bvec4 lessThanEqual(uvec4 x, uvec4 y);"
-
- "bvec2 greaterThan(uvec2 x, uvec2 y);"
- "bvec3 greaterThan(uvec3 x, uvec3 y);"
- "bvec4 greaterThan(uvec4 x, uvec4 y);"
-
- "bvec2 greaterThanEqual(uvec2 x, uvec2 y);"
- "bvec3 greaterThanEqual(uvec3 x, uvec3 y);"
- "bvec4 greaterThanEqual(uvec4 x, uvec4 y);"
-
- "bvec2 equal(uvec2 x, uvec2 y);"
- "bvec3 equal(uvec3 x, uvec3 y);"
- "bvec4 equal(uvec4 x, uvec4 y);"
-
- "bvec2 notEqual(uvec2 x, uvec2 y);"
- "bvec3 notEqual(uvec3 x, uvec3 y);"
- "bvec4 notEqual(uvec4 x, uvec4 y);"
-
- "\n");
- }
-
+#ifndef GLSLANG_WEB
//
// Original-style texture functions existing in all stages.
// (Per-stage functions below.)
@@ -1884,58 +1801,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"bool subgroupAll(bool);\n"
"bool subgroupAny(bool);\n"
-
- "bool subgroupAllEqual(float);\n"
- "bool subgroupAllEqual(vec2);\n"
- "bool subgroupAllEqual(vec3);\n"
- "bool subgroupAllEqual(vec4);\n"
- "bool subgroupAllEqual(int);\n"
- "bool subgroupAllEqual(ivec2);\n"
- "bool subgroupAllEqual(ivec3);\n"
- "bool subgroupAllEqual(ivec4);\n"
- "bool subgroupAllEqual(uint);\n"
- "bool subgroupAllEqual(uvec2);\n"
- "bool subgroupAllEqual(uvec3);\n"
- "bool subgroupAllEqual(uvec4);\n"
- "bool subgroupAllEqual(bool);\n"
- "bool subgroupAllEqual(bvec2);\n"
- "bool subgroupAllEqual(bvec3);\n"
- "bool subgroupAllEqual(bvec4);\n"
-
- "float subgroupBroadcast(float, uint);\n"
- "vec2 subgroupBroadcast(vec2, uint);\n"
- "vec3 subgroupBroadcast(vec3, uint);\n"
- "vec4 subgroupBroadcast(vec4, uint);\n"
- "int subgroupBroadcast(int, uint);\n"
- "ivec2 subgroupBroadcast(ivec2, uint);\n"
- "ivec3 subgroupBroadcast(ivec3, uint);\n"
- "ivec4 subgroupBroadcast(ivec4, uint);\n"
- "uint subgroupBroadcast(uint, uint);\n"
- "uvec2 subgroupBroadcast(uvec2, uint);\n"
- "uvec3 subgroupBroadcast(uvec3, uint);\n"
- "uvec4 subgroupBroadcast(uvec4, uint);\n"
- "bool subgroupBroadcast(bool, uint);\n"
- "bvec2 subgroupBroadcast(bvec2, uint);\n"
- "bvec3 subgroupBroadcast(bvec3, uint);\n"
- "bvec4 subgroupBroadcast(bvec4, uint);\n"
-
- "float subgroupBroadcastFirst(float);\n"
- "vec2 subgroupBroadcastFirst(vec2);\n"
- "vec3 subgroupBroadcastFirst(vec3);\n"
- "vec4 subgroupBroadcastFirst(vec4);\n"
- "int subgroupBroadcastFirst(int);\n"
- "ivec2 subgroupBroadcastFirst(ivec2);\n"
- "ivec3 subgroupBroadcastFirst(ivec3);\n"
- "ivec4 subgroupBroadcastFirst(ivec4);\n"
- "uint subgroupBroadcastFirst(uint);\n"
- "uvec2 subgroupBroadcastFirst(uvec2);\n"
- "uvec3 subgroupBroadcastFirst(uvec3);\n"
- "uvec4 subgroupBroadcastFirst(uvec4);\n"
- "bool subgroupBroadcastFirst(bool);\n"
- "bvec2 subgroupBroadcastFirst(bvec2);\n"
- "bvec3 subgroupBroadcastFirst(bvec3);\n"
- "bvec4 subgroupBroadcastFirst(bvec4);\n"
-
"uvec4 subgroupBallot(bool);\n"
"bool subgroupInverseBallot(uvec4);\n"
"bool subgroupBallotBitExtract(uvec4, uint);\n"
@@ -1944,1015 +1809,136 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"uint subgroupBallotExclusiveBitCount(uvec4);\n"
"uint subgroupBallotFindLSB(uvec4);\n"
"uint subgroupBallotFindMSB(uvec4);\n"
+ );
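+ // The remaining subgroup ops (shuffle, arithmetic, clustered, quad,
+ // partitioned) are generated from the op/type templates below.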
- "float subgroupShuffle(float, uint);\n"
- "vec2 subgroupShuffle(vec2, uint);\n"
- "vec3 subgroupShuffle(vec3, uint);\n"
- "vec4 subgroupShuffle(vec4, uint);\n"
- "int subgroupShuffle(int, uint);\n"
- "ivec2 subgroupShuffle(ivec2, uint);\n"
- "ivec3 subgroupShuffle(ivec3, uint);\n"
- "ivec4 subgroupShuffle(ivec4, uint);\n"
- "uint subgroupShuffle(uint, uint);\n"
- "uvec2 subgroupShuffle(uvec2, uint);\n"
- "uvec3 subgroupShuffle(uvec3, uint);\n"
- "uvec4 subgroupShuffle(uvec4, uint);\n"
- "bool subgroupShuffle(bool, uint);\n"
- "bvec2 subgroupShuffle(bvec2, uint);\n"
- "bvec3 subgroupShuffle(bvec3, uint);\n"
- "bvec4 subgroupShuffle(bvec4, uint);\n"
-
- "float subgroupShuffleXor(float, uint);\n"
- "vec2 subgroupShuffleXor(vec2, uint);\n"
- "vec3 subgroupShuffleXor(vec3, uint);\n"
- "vec4 subgroupShuffleXor(vec4, uint);\n"
- "int subgroupShuffleXor(int, uint);\n"
- "ivec2 subgroupShuffleXor(ivec2, uint);\n"
- "ivec3 subgroupShuffleXor(ivec3, uint);\n"
- "ivec4 subgroupShuffleXor(ivec4, uint);\n"
- "uint subgroupShuffleXor(uint, uint);\n"
- "uvec2 subgroupShuffleXor(uvec2, uint);\n"
- "uvec3 subgroupShuffleXor(uvec3, uint);\n"
- "uvec4 subgroupShuffleXor(uvec4, uint);\n"
- "bool subgroupShuffleXor(bool, uint);\n"
- "bvec2 subgroupShuffleXor(bvec2, uint);\n"
- "bvec3 subgroupShuffleXor(bvec3, uint);\n"
- "bvec4 subgroupShuffleXor(bvec4, uint);\n"
-
- "float subgroupShuffleUp(float, uint delta);\n"
- "vec2 subgroupShuffleUp(vec2, uint delta);\n"
- "vec3 subgroupShuffleUp(vec3, uint delta);\n"
- "vec4 subgroupShuffleUp(vec4, uint delta);\n"
- "int subgroupShuffleUp(int, uint delta);\n"
- "ivec2 subgroupShuffleUp(ivec2, uint delta);\n"
- "ivec3 subgroupShuffleUp(ivec3, uint delta);\n"
- "ivec4 subgroupShuffleUp(ivec4, uint delta);\n"
- "uint subgroupShuffleUp(uint, uint delta);\n"
- "uvec2 subgroupShuffleUp(uvec2, uint delta);\n"
- "uvec3 subgroupShuffleUp(uvec3, uint delta);\n"
- "uvec4 subgroupShuffleUp(uvec4, uint delta);\n"
- "bool subgroupShuffleUp(bool, uint delta);\n"
- "bvec2 subgroupShuffleUp(bvec2, uint delta);\n"
- "bvec3 subgroupShuffleUp(bvec3, uint delta);\n"
- "bvec4 subgroupShuffleUp(bvec4, uint delta);\n"
-
- "float subgroupShuffleDown(float, uint delta);\n"
- "vec2 subgroupShuffleDown(vec2, uint delta);\n"
- "vec3 subgroupShuffleDown(vec3, uint delta);\n"
- "vec4 subgroupShuffleDown(vec4, uint delta);\n"
- "int subgroupShuffleDown(int, uint delta);\n"
- "ivec2 subgroupShuffleDown(ivec2, uint delta);\n"
- "ivec3 subgroupShuffleDown(ivec3, uint delta);\n"
- "ivec4 subgroupShuffleDown(ivec4, uint delta);\n"
- "uint subgroupShuffleDown(uint, uint delta);\n"
- "uvec2 subgroupShuffleDown(uvec2, uint delta);\n"
- "uvec3 subgroupShuffleDown(uvec3, uint delta);\n"
- "uvec4 subgroupShuffleDown(uvec4, uint delta);\n"
- "bool subgroupShuffleDown(bool, uint delta);\n"
- "bvec2 subgroupShuffleDown(bvec2, uint delta);\n"
- "bvec3 subgroupShuffleDown(bvec3, uint delta);\n"
- "bvec4 subgroupShuffleDown(bvec4, uint delta);\n"
-
- "float subgroupAdd(float);\n"
- "vec2 subgroupAdd(vec2);\n"
- "vec3 subgroupAdd(vec3);\n"
- "vec4 subgroupAdd(vec4);\n"
- "int subgroupAdd(int);\n"
- "ivec2 subgroupAdd(ivec2);\n"
- "ivec3 subgroupAdd(ivec3);\n"
- "ivec4 subgroupAdd(ivec4);\n"
- "uint subgroupAdd(uint);\n"
- "uvec2 subgroupAdd(uvec2);\n"
- "uvec3 subgroupAdd(uvec3);\n"
- "uvec4 subgroupAdd(uvec4);\n"
-
- "float subgroupMul(float);\n"
- "vec2 subgroupMul(vec2);\n"
- "vec3 subgroupMul(vec3);\n"
- "vec4 subgroupMul(vec4);\n"
- "int subgroupMul(int);\n"
- "ivec2 subgroupMul(ivec2);\n"
- "ivec3 subgroupMul(ivec3);\n"
- "ivec4 subgroupMul(ivec4);\n"
- "uint subgroupMul(uint);\n"
- "uvec2 subgroupMul(uvec2);\n"
- "uvec3 subgroupMul(uvec3);\n"
- "uvec4 subgroupMul(uvec4);\n"
-
- "float subgroupMin(float);\n"
- "vec2 subgroupMin(vec2);\n"
- "vec3 subgroupMin(vec3);\n"
- "vec4 subgroupMin(vec4);\n"
- "int subgroupMin(int);\n"
- "ivec2 subgroupMin(ivec2);\n"
- "ivec3 subgroupMin(ivec3);\n"
- "ivec4 subgroupMin(ivec4);\n"
- "uint subgroupMin(uint);\n"
- "uvec2 subgroupMin(uvec2);\n"
- "uvec3 subgroupMin(uvec3);\n"
- "uvec4 subgroupMin(uvec4);\n"
-
- "float subgroupMax(float);\n"
- "vec2 subgroupMax(vec2);\n"
- "vec3 subgroupMax(vec3);\n"
- "vec4 subgroupMax(vec4);\n"
- "int subgroupMax(int);\n"
- "ivec2 subgroupMax(ivec2);\n"
- "ivec3 subgroupMax(ivec3);\n"
- "ivec4 subgroupMax(ivec4);\n"
- "uint subgroupMax(uint);\n"
- "uvec2 subgroupMax(uvec2);\n"
- "uvec3 subgroupMax(uvec3);\n"
- "uvec4 subgroupMax(uvec4);\n"
-
- "int subgroupAnd(int);\n"
- "ivec2 subgroupAnd(ivec2);\n"
- "ivec3 subgroupAnd(ivec3);\n"
- "ivec4 subgroupAnd(ivec4);\n"
- "uint subgroupAnd(uint);\n"
- "uvec2 subgroupAnd(uvec2);\n"
- "uvec3 subgroupAnd(uvec3);\n"
- "uvec4 subgroupAnd(uvec4);\n"
- "bool subgroupAnd(bool);\n"
- "bvec2 subgroupAnd(bvec2);\n"
- "bvec3 subgroupAnd(bvec3);\n"
- "bvec4 subgroupAnd(bvec4);\n"
-
- "int subgroupOr(int);\n"
- "ivec2 subgroupOr(ivec2);\n"
- "ivec3 subgroupOr(ivec3);\n"
- "ivec4 subgroupOr(ivec4);\n"
- "uint subgroupOr(uint);\n"
- "uvec2 subgroupOr(uvec2);\n"
- "uvec3 subgroupOr(uvec3);\n"
- "uvec4 subgroupOr(uvec4);\n"
- "bool subgroupOr(bool);\n"
- "bvec2 subgroupOr(bvec2);\n"
- "bvec3 subgroupOr(bvec3);\n"
- "bvec4 subgroupOr(bvec4);\n"
-
- "int subgroupXor(int);\n"
- "ivec2 subgroupXor(ivec2);\n"
- "ivec3 subgroupXor(ivec3);\n"
- "ivec4 subgroupXor(ivec4);\n"
- "uint subgroupXor(uint);\n"
- "uvec2 subgroupXor(uvec2);\n"
- "uvec3 subgroupXor(uvec3);\n"
- "uvec4 subgroupXor(uvec4);\n"
- "bool subgroupXor(bool);\n"
- "bvec2 subgroupXor(bvec2);\n"
- "bvec3 subgroupXor(bvec3);\n"
- "bvec4 subgroupXor(bvec4);\n"
-
- "float subgroupInclusiveAdd(float);\n"
- "vec2 subgroupInclusiveAdd(vec2);\n"
- "vec3 subgroupInclusiveAdd(vec3);\n"
- "vec4 subgroupInclusiveAdd(vec4);\n"
- "int subgroupInclusiveAdd(int);\n"
- "ivec2 subgroupInclusiveAdd(ivec2);\n"
- "ivec3 subgroupInclusiveAdd(ivec3);\n"
- "ivec4 subgroupInclusiveAdd(ivec4);\n"
- "uint subgroupInclusiveAdd(uint);\n"
- "uvec2 subgroupInclusiveAdd(uvec2);\n"
- "uvec3 subgroupInclusiveAdd(uvec3);\n"
- "uvec4 subgroupInclusiveAdd(uvec4);\n"
-
- "float subgroupInclusiveMul(float);\n"
- "vec2 subgroupInclusiveMul(vec2);\n"
- "vec3 subgroupInclusiveMul(vec3);\n"
- "vec4 subgroupInclusiveMul(vec4);\n"
- "int subgroupInclusiveMul(int);\n"
- "ivec2 subgroupInclusiveMul(ivec2);\n"
- "ivec3 subgroupInclusiveMul(ivec3);\n"
- "ivec4 subgroupInclusiveMul(ivec4);\n"
- "uint subgroupInclusiveMul(uint);\n"
- "uvec2 subgroupInclusiveMul(uvec2);\n"
- "uvec3 subgroupInclusiveMul(uvec3);\n"
- "uvec4 subgroupInclusiveMul(uvec4);\n"
-
- "float subgroupInclusiveMin(float);\n"
- "vec2 subgroupInclusiveMin(vec2);\n"
- "vec3 subgroupInclusiveMin(vec3);\n"
- "vec4 subgroupInclusiveMin(vec4);\n"
- "int subgroupInclusiveMin(int);\n"
- "ivec2 subgroupInclusiveMin(ivec2);\n"
- "ivec3 subgroupInclusiveMin(ivec3);\n"
- "ivec4 subgroupInclusiveMin(ivec4);\n"
- "uint subgroupInclusiveMin(uint);\n"
- "uvec2 subgroupInclusiveMin(uvec2);\n"
- "uvec3 subgroupInclusiveMin(uvec3);\n"
- "uvec4 subgroupInclusiveMin(uvec4);\n"
-
- "float subgroupInclusiveMax(float);\n"
- "vec2 subgroupInclusiveMax(vec2);\n"
- "vec3 subgroupInclusiveMax(vec3);\n"
- "vec4 subgroupInclusiveMax(vec4);\n"
- "int subgroupInclusiveMax(int);\n"
- "ivec2 subgroupInclusiveMax(ivec2);\n"
- "ivec3 subgroupInclusiveMax(ivec3);\n"
- "ivec4 subgroupInclusiveMax(ivec4);\n"
- "uint subgroupInclusiveMax(uint);\n"
- "uvec2 subgroupInclusiveMax(uvec2);\n"
- "uvec3 subgroupInclusiveMax(uvec3);\n"
- "uvec4 subgroupInclusiveMax(uvec4);\n"
-
- "int subgroupInclusiveAnd(int);\n"
- "ivec2 subgroupInclusiveAnd(ivec2);\n"
- "ivec3 subgroupInclusiveAnd(ivec3);\n"
- "ivec4 subgroupInclusiveAnd(ivec4);\n"
- "uint subgroupInclusiveAnd(uint);\n"
- "uvec2 subgroupInclusiveAnd(uvec2);\n"
- "uvec3 subgroupInclusiveAnd(uvec3);\n"
- "uvec4 subgroupInclusiveAnd(uvec4);\n"
- "bool subgroupInclusiveAnd(bool);\n"
- "bvec2 subgroupInclusiveAnd(bvec2);\n"
- "bvec3 subgroupInclusiveAnd(bvec3);\n"
- "bvec4 subgroupInclusiveAnd(bvec4);\n"
-
- "int subgroupInclusiveOr(int);\n"
- "ivec2 subgroupInclusiveOr(ivec2);\n"
- "ivec3 subgroupInclusiveOr(ivec3);\n"
- "ivec4 subgroupInclusiveOr(ivec4);\n"
- "uint subgroupInclusiveOr(uint);\n"
- "uvec2 subgroupInclusiveOr(uvec2);\n"
- "uvec3 subgroupInclusiveOr(uvec3);\n"
- "uvec4 subgroupInclusiveOr(uvec4);\n"
- "bool subgroupInclusiveOr(bool);\n"
- "bvec2 subgroupInclusiveOr(bvec2);\n"
- "bvec3 subgroupInclusiveOr(bvec3);\n"
- "bvec4 subgroupInclusiveOr(bvec4);\n"
-
- "int subgroupInclusiveXor(int);\n"
- "ivec2 subgroupInclusiveXor(ivec2);\n"
- "ivec3 subgroupInclusiveXor(ivec3);\n"
- "ivec4 subgroupInclusiveXor(ivec4);\n"
- "uint subgroupInclusiveXor(uint);\n"
- "uvec2 subgroupInclusiveXor(uvec2);\n"
- "uvec3 subgroupInclusiveXor(uvec3);\n"
- "uvec4 subgroupInclusiveXor(uvec4);\n"
- "bool subgroupInclusiveXor(bool);\n"
- "bvec2 subgroupInclusiveXor(bvec2);\n"
- "bvec3 subgroupInclusiveXor(bvec3);\n"
- "bvec4 subgroupInclusiveXor(bvec4);\n"
-
- "float subgroupExclusiveAdd(float);\n"
- "vec2 subgroupExclusiveAdd(vec2);\n"
- "vec3 subgroupExclusiveAdd(vec3);\n"
- "vec4 subgroupExclusiveAdd(vec4);\n"
- "int subgroupExclusiveAdd(int);\n"
- "ivec2 subgroupExclusiveAdd(ivec2);\n"
- "ivec3 subgroupExclusiveAdd(ivec3);\n"
- "ivec4 subgroupExclusiveAdd(ivec4);\n"
- "uint subgroupExclusiveAdd(uint);\n"
- "uvec2 subgroupExclusiveAdd(uvec2);\n"
- "uvec3 subgroupExclusiveAdd(uvec3);\n"
- "uvec4 subgroupExclusiveAdd(uvec4);\n"
-
- "float subgroupExclusiveMul(float);\n"
- "vec2 subgroupExclusiveMul(vec2);\n"
- "vec3 subgroupExclusiveMul(vec3);\n"
- "vec4 subgroupExclusiveMul(vec4);\n"
- "int subgroupExclusiveMul(int);\n"
- "ivec2 subgroupExclusiveMul(ivec2);\n"
- "ivec3 subgroupExclusiveMul(ivec3);\n"
- "ivec4 subgroupExclusiveMul(ivec4);\n"
- "uint subgroupExclusiveMul(uint);\n"
- "uvec2 subgroupExclusiveMul(uvec2);\n"
- "uvec3 subgroupExclusiveMul(uvec3);\n"
- "uvec4 subgroupExclusiveMul(uvec4);\n"
-
- "float subgroupExclusiveMin(float);\n"
- "vec2 subgroupExclusiveMin(vec2);\n"
- "vec3 subgroupExclusiveMin(vec3);\n"
- "vec4 subgroupExclusiveMin(vec4);\n"
- "int subgroupExclusiveMin(int);\n"
- "ivec2 subgroupExclusiveMin(ivec2);\n"
- "ivec3 subgroupExclusiveMin(ivec3);\n"
- "ivec4 subgroupExclusiveMin(ivec4);\n"
- "uint subgroupExclusiveMin(uint);\n"
- "uvec2 subgroupExclusiveMin(uvec2);\n"
- "uvec3 subgroupExclusiveMin(uvec3);\n"
- "uvec4 subgroupExclusiveMin(uvec4);\n"
-
- "float subgroupExclusiveMax(float);\n"
- "vec2 subgroupExclusiveMax(vec2);\n"
- "vec3 subgroupExclusiveMax(vec3);\n"
- "vec4 subgroupExclusiveMax(vec4);\n"
- "int subgroupExclusiveMax(int);\n"
- "ivec2 subgroupExclusiveMax(ivec2);\n"
- "ivec3 subgroupExclusiveMax(ivec3);\n"
- "ivec4 subgroupExclusiveMax(ivec4);\n"
- "uint subgroupExclusiveMax(uint);\n"
- "uvec2 subgroupExclusiveMax(uvec2);\n"
- "uvec3 subgroupExclusiveMax(uvec3);\n"
- "uvec4 subgroupExclusiveMax(uvec4);\n"
-
- "int subgroupExclusiveAnd(int);\n"
- "ivec2 subgroupExclusiveAnd(ivec2);\n"
- "ivec3 subgroupExclusiveAnd(ivec3);\n"
- "ivec4 subgroupExclusiveAnd(ivec4);\n"
- "uint subgroupExclusiveAnd(uint);\n"
- "uvec2 subgroupExclusiveAnd(uvec2);\n"
- "uvec3 subgroupExclusiveAnd(uvec3);\n"
- "uvec4 subgroupExclusiveAnd(uvec4);\n"
- "bool subgroupExclusiveAnd(bool);\n"
- "bvec2 subgroupExclusiveAnd(bvec2);\n"
- "bvec3 subgroupExclusiveAnd(bvec3);\n"
- "bvec4 subgroupExclusiveAnd(bvec4);\n"
-
- "int subgroupExclusiveOr(int);\n"
- "ivec2 subgroupExclusiveOr(ivec2);\n"
- "ivec3 subgroupExclusiveOr(ivec3);\n"
- "ivec4 subgroupExclusiveOr(ivec4);\n"
- "uint subgroupExclusiveOr(uint);\n"
- "uvec2 subgroupExclusiveOr(uvec2);\n"
- "uvec3 subgroupExclusiveOr(uvec3);\n"
- "uvec4 subgroupExclusiveOr(uvec4);\n"
- "bool subgroupExclusiveOr(bool);\n"
- "bvec2 subgroupExclusiveOr(bvec2);\n"
- "bvec3 subgroupExclusiveOr(bvec3);\n"
- "bvec4 subgroupExclusiveOr(bvec4);\n"
-
- "int subgroupExclusiveXor(int);\n"
- "ivec2 subgroupExclusiveXor(ivec2);\n"
- "ivec3 subgroupExclusiveXor(ivec3);\n"
- "ivec4 subgroupExclusiveXor(ivec4);\n"
- "uint subgroupExclusiveXor(uint);\n"
- "uvec2 subgroupExclusiveXor(uvec2);\n"
- "uvec3 subgroupExclusiveXor(uvec3);\n"
- "uvec4 subgroupExclusiveXor(uvec4);\n"
- "bool subgroupExclusiveXor(bool);\n"
- "bvec2 subgroupExclusiveXor(bvec2);\n"
- "bvec3 subgroupExclusiveXor(bvec3);\n"
- "bvec4 subgroupExclusiveXor(bvec4);\n"
-
- "float subgroupClusteredAdd(float, uint);\n"
- "vec2 subgroupClusteredAdd(vec2, uint);\n"
- "vec3 subgroupClusteredAdd(vec3, uint);\n"
- "vec4 subgroupClusteredAdd(vec4, uint);\n"
- "int subgroupClusteredAdd(int, uint);\n"
- "ivec2 subgroupClusteredAdd(ivec2, uint);\n"
- "ivec3 subgroupClusteredAdd(ivec3, uint);\n"
- "ivec4 subgroupClusteredAdd(ivec4, uint);\n"
- "uint subgroupClusteredAdd(uint, uint);\n"
- "uvec2 subgroupClusteredAdd(uvec2, uint);\n"
- "uvec3 subgroupClusteredAdd(uvec3, uint);\n"
- "uvec4 subgroupClusteredAdd(uvec4, uint);\n"
-
- "float subgroupClusteredMul(float, uint);\n"
- "vec2 subgroupClusteredMul(vec2, uint);\n"
- "vec3 subgroupClusteredMul(vec3, uint);\n"
- "vec4 subgroupClusteredMul(vec4, uint);\n"
- "int subgroupClusteredMul(int, uint);\n"
- "ivec2 subgroupClusteredMul(ivec2, uint);\n"
- "ivec3 subgroupClusteredMul(ivec3, uint);\n"
- "ivec4 subgroupClusteredMul(ivec4, uint);\n"
- "uint subgroupClusteredMul(uint, uint);\n"
- "uvec2 subgroupClusteredMul(uvec2, uint);\n"
- "uvec3 subgroupClusteredMul(uvec3, uint);\n"
- "uvec4 subgroupClusteredMul(uvec4, uint);\n"
-
- "float subgroupClusteredMin(float, uint);\n"
- "vec2 subgroupClusteredMin(vec2, uint);\n"
- "vec3 subgroupClusteredMin(vec3, uint);\n"
- "vec4 subgroupClusteredMin(vec4, uint);\n"
- "int subgroupClusteredMin(int, uint);\n"
- "ivec2 subgroupClusteredMin(ivec2, uint);\n"
- "ivec3 subgroupClusteredMin(ivec3, uint);\n"
- "ivec4 subgroupClusteredMin(ivec4, uint);\n"
- "uint subgroupClusteredMin(uint, uint);\n"
- "uvec2 subgroupClusteredMin(uvec2, uint);\n"
- "uvec3 subgroupClusteredMin(uvec3, uint);\n"
- "uvec4 subgroupClusteredMin(uvec4, uint);\n"
-
- "float subgroupClusteredMax(float, uint);\n"
- "vec2 subgroupClusteredMax(vec2, uint);\n"
- "vec3 subgroupClusteredMax(vec3, uint);\n"
- "vec4 subgroupClusteredMax(vec4, uint);\n"
- "int subgroupClusteredMax(int, uint);\n"
- "ivec2 subgroupClusteredMax(ivec2, uint);\n"
- "ivec3 subgroupClusteredMax(ivec3, uint);\n"
- "ivec4 subgroupClusteredMax(ivec4, uint);\n"
- "uint subgroupClusteredMax(uint, uint);\n"
- "uvec2 subgroupClusteredMax(uvec2, uint);\n"
- "uvec3 subgroupClusteredMax(uvec3, uint);\n"
- "uvec4 subgroupClusteredMax(uvec4, uint);\n"
-
- "int subgroupClusteredAnd(int, uint);\n"
- "ivec2 subgroupClusteredAnd(ivec2, uint);\n"
- "ivec3 subgroupClusteredAnd(ivec3, uint);\n"
- "ivec4 subgroupClusteredAnd(ivec4, uint);\n"
- "uint subgroupClusteredAnd(uint, uint);\n"
- "uvec2 subgroupClusteredAnd(uvec2, uint);\n"
- "uvec3 subgroupClusteredAnd(uvec3, uint);\n"
- "uvec4 subgroupClusteredAnd(uvec4, uint);\n"
- "bool subgroupClusteredAnd(bool, uint);\n"
- "bvec2 subgroupClusteredAnd(bvec2, uint);\n"
- "bvec3 subgroupClusteredAnd(bvec3, uint);\n"
- "bvec4 subgroupClusteredAnd(bvec4, uint);\n"
-
- "int subgroupClusteredOr(int, uint);\n"
- "ivec2 subgroupClusteredOr(ivec2, uint);\n"
- "ivec3 subgroupClusteredOr(ivec3, uint);\n"
- "ivec4 subgroupClusteredOr(ivec4, uint);\n"
- "uint subgroupClusteredOr(uint, uint);\n"
- "uvec2 subgroupClusteredOr(uvec2, uint);\n"
- "uvec3 subgroupClusteredOr(uvec3, uint);\n"
- "uvec4 subgroupClusteredOr(uvec4, uint);\n"
- "bool subgroupClusteredOr(bool, uint);\n"
- "bvec2 subgroupClusteredOr(bvec2, uint);\n"
- "bvec3 subgroupClusteredOr(bvec3, uint);\n"
- "bvec4 subgroupClusteredOr(bvec4, uint);\n"
-
- "int subgroupClusteredXor(int, uint);\n"
- "ivec2 subgroupClusteredXor(ivec2, uint);\n"
- "ivec3 subgroupClusteredXor(ivec3, uint);\n"
- "ivec4 subgroupClusteredXor(ivec4, uint);\n"
- "uint subgroupClusteredXor(uint, uint);\n"
- "uvec2 subgroupClusteredXor(uvec2, uint);\n"
- "uvec3 subgroupClusteredXor(uvec3, uint);\n"
- "uvec4 subgroupClusteredXor(uvec4, uint);\n"
- "bool subgroupClusteredXor(bool, uint);\n"
- "bvec2 subgroupClusteredXor(bvec2, uint);\n"
- "bvec3 subgroupClusteredXor(bvec3, uint);\n"
- "bvec4 subgroupClusteredXor(bvec4, uint);\n"
-
- "float subgroupQuadBroadcast(float, uint);\n"
- "vec2 subgroupQuadBroadcast(vec2, uint);\n"
- "vec3 subgroupQuadBroadcast(vec3, uint);\n"
- "vec4 subgroupQuadBroadcast(vec4, uint);\n"
- "int subgroupQuadBroadcast(int, uint);\n"
- "ivec2 subgroupQuadBroadcast(ivec2, uint);\n"
- "ivec3 subgroupQuadBroadcast(ivec3, uint);\n"
- "ivec4 subgroupQuadBroadcast(ivec4, uint);\n"
- "uint subgroupQuadBroadcast(uint, uint);\n"
- "uvec2 subgroupQuadBroadcast(uvec2, uint);\n"
- "uvec3 subgroupQuadBroadcast(uvec3, uint);\n"
- "uvec4 subgroupQuadBroadcast(uvec4, uint);\n"
- "bool subgroupQuadBroadcast(bool, uint);\n"
- "bvec2 subgroupQuadBroadcast(bvec2, uint);\n"
- "bvec3 subgroupQuadBroadcast(bvec3, uint);\n"
- "bvec4 subgroupQuadBroadcast(bvec4, uint);\n"
-
- "float subgroupQuadSwapHorizontal(float);\n"
- "vec2 subgroupQuadSwapHorizontal(vec2);\n"
- "vec3 subgroupQuadSwapHorizontal(vec3);\n"
- "vec4 subgroupQuadSwapHorizontal(vec4);\n"
- "int subgroupQuadSwapHorizontal(int);\n"
- "ivec2 subgroupQuadSwapHorizontal(ivec2);\n"
- "ivec3 subgroupQuadSwapHorizontal(ivec3);\n"
- "ivec4 subgroupQuadSwapHorizontal(ivec4);\n"
- "uint subgroupQuadSwapHorizontal(uint);\n"
- "uvec2 subgroupQuadSwapHorizontal(uvec2);\n"
- "uvec3 subgroupQuadSwapHorizontal(uvec3);\n"
- "uvec4 subgroupQuadSwapHorizontal(uvec4);\n"
- "bool subgroupQuadSwapHorizontal(bool);\n"
- "bvec2 subgroupQuadSwapHorizontal(bvec2);\n"
- "bvec3 subgroupQuadSwapHorizontal(bvec3);\n"
- "bvec4 subgroupQuadSwapHorizontal(bvec4);\n"
-
- "float subgroupQuadSwapVertical(float);\n"
- "vec2 subgroupQuadSwapVertical(vec2);\n"
- "vec3 subgroupQuadSwapVertical(vec3);\n"
- "vec4 subgroupQuadSwapVertical(vec4);\n"
- "int subgroupQuadSwapVertical(int);\n"
- "ivec2 subgroupQuadSwapVertical(ivec2);\n"
- "ivec3 subgroupQuadSwapVertical(ivec3);\n"
- "ivec4 subgroupQuadSwapVertical(ivec4);\n"
- "uint subgroupQuadSwapVertical(uint);\n"
- "uvec2 subgroupQuadSwapVertical(uvec2);\n"
- "uvec3 subgroupQuadSwapVertical(uvec3);\n"
- "uvec4 subgroupQuadSwapVertical(uvec4);\n"
- "bool subgroupQuadSwapVertical(bool);\n"
- "bvec2 subgroupQuadSwapVertical(bvec2);\n"
- "bvec3 subgroupQuadSwapVertical(bvec3);\n"
- "bvec4 subgroupQuadSwapVertical(bvec4);\n"
-
- "float subgroupQuadSwapDiagonal(float);\n"
- "vec2 subgroupQuadSwapDiagonal(vec2);\n"
- "vec3 subgroupQuadSwapDiagonal(vec3);\n"
- "vec4 subgroupQuadSwapDiagonal(vec4);\n"
- "int subgroupQuadSwapDiagonal(int);\n"
- "ivec2 subgroupQuadSwapDiagonal(ivec2);\n"
- "ivec3 subgroupQuadSwapDiagonal(ivec3);\n"
- "ivec4 subgroupQuadSwapDiagonal(ivec4);\n"
- "uint subgroupQuadSwapDiagonal(uint);\n"
- "uvec2 subgroupQuadSwapDiagonal(uvec2);\n"
- "uvec3 subgroupQuadSwapDiagonal(uvec3);\n"
- "uvec4 subgroupQuadSwapDiagonal(uvec4);\n"
- "bool subgroupQuadSwapDiagonal(bool);\n"
- "bvec2 subgroupQuadSwapDiagonal(bvec2);\n"
- "bvec3 subgroupQuadSwapDiagonal(bvec3);\n"
- "bvec4 subgroupQuadSwapDiagonal(bvec4);\n"
-
-#ifdef NV_EXTENSIONS
- "uvec4 subgroupPartitionNV(float);\n"
- "uvec4 subgroupPartitionNV(vec2);\n"
- "uvec4 subgroupPartitionNV(vec3);\n"
- "uvec4 subgroupPartitionNV(vec4);\n"
- "uvec4 subgroupPartitionNV(int);\n"
- "uvec4 subgroupPartitionNV(ivec2);\n"
- "uvec4 subgroupPartitionNV(ivec3);\n"
- "uvec4 subgroupPartitionNV(ivec4);\n"
- "uvec4 subgroupPartitionNV(uint);\n"
- "uvec4 subgroupPartitionNV(uvec2);\n"
- "uvec4 subgroupPartitionNV(uvec3);\n"
- "uvec4 subgroupPartitionNV(uvec4);\n"
- "uvec4 subgroupPartitionNV(bool);\n"
- "uvec4 subgroupPartitionNV(bvec2);\n"
- "uvec4 subgroupPartitionNV(bvec3);\n"
- "uvec4 subgroupPartitionNV(bvec4);\n"
-
- "float subgroupPartitionedAddNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedAddNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedAddNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedAddNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedAddNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedAddNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedAddNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedAddNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedAddNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedAddNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedAddNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedAddNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedMulNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedMulNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedMulNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedMulNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedMulNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedMulNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedMulNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedMulNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedMulNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedMulNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedMulNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedMulNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedMinNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedMinNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedMinNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedMinNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedMinNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedMinNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedMinNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedMinNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedMinNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedMinNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedMinNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedMinNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedMaxNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedMaxNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedMaxNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedMaxNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedMaxNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedMaxNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedMaxNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedMaxNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedMaxNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedMaxNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedMaxNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedMaxNV(uvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedAndNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedAndNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedAndNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedAndNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedAndNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedAndNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedAndNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedAndNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedAndNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedAndNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedAndNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedAndNV(bvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedOrNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedOrNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedOrNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedOrNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedOrNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedOrNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedOrNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedOrNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedOrNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedOrNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedOrNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedOrNV(bvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedXorNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedXorNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedXorNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedXorNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedXorNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedXorNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedXorNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedXorNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedXorNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedXorNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedXorNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedXorNV(bvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedInclusiveAddNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedInclusiveAddNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedInclusiveAddNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedInclusiveAddNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedInclusiveAddNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedInclusiveAddNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedInclusiveAddNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedInclusiveAddNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedInclusiveAddNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedInclusiveAddNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedInclusiveAddNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedInclusiveAddNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedInclusiveMulNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedInclusiveMulNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedInclusiveMulNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedInclusiveMulNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedInclusiveMulNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedInclusiveMulNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedInclusiveMulNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedInclusiveMulNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedInclusiveMulNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedInclusiveMulNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedInclusiveMulNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedInclusiveMulNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedInclusiveMinNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedInclusiveMinNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedInclusiveMinNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedInclusiveMinNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedInclusiveMinNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedInclusiveMinNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedInclusiveMinNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedInclusiveMinNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedInclusiveMinNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedInclusiveMinNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedInclusiveMinNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedInclusiveMinNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedInclusiveMaxNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedInclusiveMaxNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedInclusiveMaxNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedInclusiveMaxNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedInclusiveMaxNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedInclusiveMaxNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedInclusiveMaxNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedInclusiveMaxNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedInclusiveMaxNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedInclusiveMaxNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedInclusiveMaxNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedInclusiveMaxNV(uvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedInclusiveAndNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedInclusiveAndNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedInclusiveAndNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedInclusiveAndNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedInclusiveAndNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedInclusiveAndNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedInclusiveAndNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedInclusiveAndNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedInclusiveAndNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedInclusiveAndNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedInclusiveAndNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedInclusiveAndNV(bvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedInclusiveOrNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedInclusiveOrNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedInclusiveOrNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedInclusiveOrNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedInclusiveOrNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedInclusiveOrNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedInclusiveOrNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedInclusiveOrNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedInclusiveOrNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedInclusiveOrNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedInclusiveOrNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedInclusiveOrNV(bvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedInclusiveXorNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedInclusiveXorNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedInclusiveXorNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedInclusiveXorNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedInclusiveXorNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedInclusiveXorNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedInclusiveXorNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedInclusiveXorNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedInclusiveXorNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedInclusiveXorNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedInclusiveXorNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedInclusiveXorNV(bvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedExclusiveAddNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedExclusiveAddNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedExclusiveAddNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedExclusiveAddNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedExclusiveAddNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedExclusiveAddNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedExclusiveAddNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedExclusiveAddNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedExclusiveAddNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedExclusiveAddNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedExclusiveAddNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedExclusiveAddNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedExclusiveMulNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedExclusiveMulNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedExclusiveMulNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedExclusiveMulNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedExclusiveMulNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedExclusiveMulNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedExclusiveMulNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedExclusiveMulNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedExclusiveMulNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedExclusiveMulNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedExclusiveMulNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedExclusiveMulNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedExclusiveMinNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedExclusiveMinNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedExclusiveMinNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedExclusiveMinNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedExclusiveMinNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedExclusiveMinNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedExclusiveMinNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedExclusiveMinNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedExclusiveMinNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedExclusiveMinNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedExclusiveMinNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedExclusiveMinNV(uvec4, uvec4 ballot);\n"
-
- "float subgroupPartitionedExclusiveMaxNV(float, uvec4 ballot);\n"
- "vec2 subgroupPartitionedExclusiveMaxNV(vec2, uvec4 ballot);\n"
- "vec3 subgroupPartitionedExclusiveMaxNV(vec3, uvec4 ballot);\n"
- "vec4 subgroupPartitionedExclusiveMaxNV(vec4, uvec4 ballot);\n"
- "int subgroupPartitionedExclusiveMaxNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedExclusiveMaxNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedExclusiveMaxNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedExclusiveMaxNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedExclusiveMaxNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedExclusiveMaxNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedExclusiveMaxNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedExclusiveMaxNV(uvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedExclusiveAndNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedExclusiveAndNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedExclusiveAndNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedExclusiveAndNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedExclusiveAndNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedExclusiveAndNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedExclusiveAndNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedExclusiveAndNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedExclusiveAndNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedExclusiveAndNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedExclusiveAndNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedExclusiveAndNV(bvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedExclusiveOrNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedExclusiveOrNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedExclusiveOrNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedExclusiveOrNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedExclusiveOrNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedExclusiveOrNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedExclusiveOrNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedExclusiveOrNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedExclusiveOrNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedExclusiveOrNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedExclusiveOrNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedExclusiveOrNV(bvec4, uvec4 ballot);\n"
-
- "int subgroupPartitionedExclusiveXorNV(int, uvec4 ballot);\n"
- "ivec2 subgroupPartitionedExclusiveXorNV(ivec2, uvec4 ballot);\n"
- "ivec3 subgroupPartitionedExclusiveXorNV(ivec3, uvec4 ballot);\n"
- "ivec4 subgroupPartitionedExclusiveXorNV(ivec4, uvec4 ballot);\n"
- "uint subgroupPartitionedExclusiveXorNV(uint, uvec4 ballot);\n"
- "uvec2 subgroupPartitionedExclusiveXorNV(uvec2, uvec4 ballot);\n"
- "uvec3 subgroupPartitionedExclusiveXorNV(uvec3, uvec4 ballot);\n"
- "uvec4 subgroupPartitionedExclusiveXorNV(uvec4, uvec4 ballot);\n"
- "bool subgroupPartitionedExclusiveXorNV(bool, uvec4 ballot);\n"
- "bvec2 subgroupPartitionedExclusiveXorNV(bvec2, uvec4 ballot);\n"
- "bvec3 subgroupPartitionedExclusiveXorNV(bvec3, uvec4 ballot);\n"
- "bvec4 subgroupPartitionedExclusiveXorNV(bvec4, uvec4 ballot);\n"
-#endif
-
- "\n");
-
- if (profile != EEsProfile && version >= 400) {
- commonBuiltins.append(
- "bool subgroupAllEqual(double);\n"
- "bool subgroupAllEqual(dvec2);\n"
- "bool subgroupAllEqual(dvec3);\n"
- "bool subgroupAllEqual(dvec4);\n"
-
- "double subgroupBroadcast(double, uint);\n"
- "dvec2 subgroupBroadcast(dvec2, uint);\n"
- "dvec3 subgroupBroadcast(dvec3, uint);\n"
- "dvec4 subgroupBroadcast(dvec4, uint);\n"
-
- "double subgroupBroadcastFirst(double);\n"
- "dvec2 subgroupBroadcastFirst(dvec2);\n"
- "dvec3 subgroupBroadcastFirst(dvec3);\n"
- "dvec4 subgroupBroadcastFirst(dvec4);\n"
-
- "double subgroupShuffle(double, uint);\n"
- "dvec2 subgroupShuffle(dvec2, uint);\n"
- "dvec3 subgroupShuffle(dvec3, uint);\n"
- "dvec4 subgroupShuffle(dvec4, uint);\n"
-
- "double subgroupShuffleXor(double, uint);\n"
- "dvec2 subgroupShuffleXor(dvec2, uint);\n"
- "dvec3 subgroupShuffleXor(dvec3, uint);\n"
- "dvec4 subgroupShuffleXor(dvec4, uint);\n"
-
- "double subgroupShuffleUp(double, uint delta);\n"
- "dvec2 subgroupShuffleUp(dvec2, uint delta);\n"
- "dvec3 subgroupShuffleUp(dvec3, uint delta);\n"
- "dvec4 subgroupShuffleUp(dvec4, uint delta);\n"
-
- "double subgroupShuffleDown(double, uint delta);\n"
- "dvec2 subgroupShuffleDown(dvec2, uint delta);\n"
- "dvec3 subgroupShuffleDown(dvec3, uint delta);\n"
- "dvec4 subgroupShuffleDown(dvec4, uint delta);\n"
-
- "double subgroupAdd(double);\n"
- "dvec2 subgroupAdd(dvec2);\n"
- "dvec3 subgroupAdd(dvec3);\n"
- "dvec4 subgroupAdd(dvec4);\n"
-
- "double subgroupMul(double);\n"
- "dvec2 subgroupMul(dvec2);\n"
- "dvec3 subgroupMul(dvec3);\n"
- "dvec4 subgroupMul(dvec4);\n"
-
- "double subgroupMin(double);\n"
- "dvec2 subgroupMin(dvec2);\n"
- "dvec3 subgroupMin(dvec3);\n"
- "dvec4 subgroupMin(dvec4);\n"
-
- "double subgroupMax(double);\n"
- "dvec2 subgroupMax(dvec2);\n"
- "dvec3 subgroupMax(dvec3);\n"
- "dvec4 subgroupMax(dvec4);\n"
-
- "double subgroupInclusiveAdd(double);\n"
- "dvec2 subgroupInclusiveAdd(dvec2);\n"
- "dvec3 subgroupInclusiveAdd(dvec3);\n"
- "dvec4 subgroupInclusiveAdd(dvec4);\n"
-
- "double subgroupInclusiveMul(double);\n"
- "dvec2 subgroupInclusiveMul(dvec2);\n"
- "dvec3 subgroupInclusiveMul(dvec3);\n"
- "dvec4 subgroupInclusiveMul(dvec4);\n"
-
- "double subgroupInclusiveMin(double);\n"
- "dvec2 subgroupInclusiveMin(dvec2);\n"
- "dvec3 subgroupInclusiveMin(dvec3);\n"
- "dvec4 subgroupInclusiveMin(dvec4);\n"
-
- "double subgroupInclusiveMax(double);\n"
- "dvec2 subgroupInclusiveMax(dvec2);\n"
- "dvec3 subgroupInclusiveMax(dvec3);\n"
- "dvec4 subgroupInclusiveMax(dvec4);\n"
-
- "double subgroupExclusiveAdd(double);\n"
- "dvec2 subgroupExclusiveAdd(dvec2);\n"
- "dvec3 subgroupExclusiveAdd(dvec3);\n"
- "dvec4 subgroupExclusiveAdd(dvec4);\n"
-
- "double subgroupExclusiveMul(double);\n"
- "dvec2 subgroupExclusiveMul(dvec2);\n"
- "dvec3 subgroupExclusiveMul(dvec3);\n"
- "dvec4 subgroupExclusiveMul(dvec4);\n"
-
- "double subgroupExclusiveMin(double);\n"
- "dvec2 subgroupExclusiveMin(dvec2);\n"
- "dvec3 subgroupExclusiveMin(dvec3);\n"
- "dvec4 subgroupExclusiveMin(dvec4);\n"
-
- "double subgroupExclusiveMax(double);\n"
- "dvec2 subgroupExclusiveMax(dvec2);\n"
- "dvec3 subgroupExclusiveMax(dvec3);\n"
- "dvec4 subgroupExclusiveMax(dvec4);\n"
-
- "double subgroupClusteredAdd(double, uint);\n"
- "dvec2 subgroupClusteredAdd(dvec2, uint);\n"
- "dvec3 subgroupClusteredAdd(dvec3, uint);\n"
- "dvec4 subgroupClusteredAdd(dvec4, uint);\n"
-
- "double subgroupClusteredMul(double, uint);\n"
- "dvec2 subgroupClusteredMul(dvec2, uint);\n"
- "dvec3 subgroupClusteredMul(dvec3, uint);\n"
- "dvec4 subgroupClusteredMul(dvec4, uint);\n"
-
- "double subgroupClusteredMin(double, uint);\n"
- "dvec2 subgroupClusteredMin(dvec2, uint);\n"
- "dvec3 subgroupClusteredMin(dvec3, uint);\n"
- "dvec4 subgroupClusteredMin(dvec4, uint);\n"
-
- "double subgroupClusteredMax(double, uint);\n"
- "dvec2 subgroupClusteredMax(dvec2, uint);\n"
- "dvec3 subgroupClusteredMax(dvec3, uint);\n"
- "dvec4 subgroupClusteredMax(dvec4, uint);\n"
-
- "double subgroupQuadBroadcast(double, uint);\n"
- "dvec2 subgroupQuadBroadcast(dvec2, uint);\n"
- "dvec3 subgroupQuadBroadcast(dvec3, uint);\n"
- "dvec4 subgroupQuadBroadcast(dvec4, uint);\n"
-
- "double subgroupQuadSwapHorizontal(double);\n"
- "dvec2 subgroupQuadSwapHorizontal(dvec2);\n"
- "dvec3 subgroupQuadSwapHorizontal(dvec3);\n"
- "dvec4 subgroupQuadSwapHorizontal(dvec4);\n"
-
- "double subgroupQuadSwapVertical(double);\n"
- "dvec2 subgroupQuadSwapVertical(dvec2);\n"
- "dvec3 subgroupQuadSwapVertical(dvec3);\n"
- "dvec4 subgroupQuadSwapVertical(dvec4);\n"
-
- "double subgroupQuadSwapDiagonal(double);\n"
- "dvec2 subgroupQuadSwapDiagonal(dvec2);\n"
- "dvec3 subgroupQuadSwapDiagonal(dvec3);\n"
- "dvec4 subgroupQuadSwapDiagonal(dvec4);\n"
-
-
-#ifdef NV_EXTENSIONS
- "uvec4 subgroupPartitionNV(double);\n"
- "uvec4 subgroupPartitionNV(dvec2);\n"
- "uvec4 subgroupPartitionNV(dvec3);\n"
- "uvec4 subgroupPartitionNV(dvec4);\n"
-
- "double subgroupPartitionedAddNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedAddNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedAddNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedAddNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedMulNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedMulNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedMulNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedMulNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedMinNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedMinNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedMinNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedMinNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedMaxNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedMaxNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedMaxNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedMaxNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedInclusiveAddNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedInclusiveAddNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedInclusiveAddNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedInclusiveAddNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedInclusiveMulNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedInclusiveMulNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedInclusiveMulNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedInclusiveMulNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedInclusiveMinNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedInclusiveMinNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedInclusiveMinNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedInclusiveMinNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedInclusiveMaxNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedInclusiveMaxNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedInclusiveMaxNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedInclusiveMaxNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedExclusiveAddNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedExclusiveAddNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedExclusiveAddNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedExclusiveAddNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedExclusiveMulNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedExclusiveMulNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedExclusiveMulNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedExclusiveMulNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedExclusiveMinNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedExclusiveMinNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedExclusiveMinNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedExclusiveMinNV(dvec4, uvec4 ballot);\n"
-
- "double subgroupPartitionedExclusiveMaxNV(double, uvec4 ballot);\n"
- "dvec2 subgroupPartitionedExclusiveMaxNV(dvec2, uvec4 ballot);\n"
- "dvec3 subgroupPartitionedExclusiveMaxNV(dvec3, uvec4 ballot);\n"
- "dvec4 subgroupPartitionedExclusiveMaxNV(dvec4, uvec4 ballot);\n"
-#endif
-
- "\n");
+ // Generate all flavors of subgroup ops by expanding printf-style
+ // templates over the type tables that follow.
+ static const char *subgroupOps[] =
+ {
+ "bool subgroupAllEqual(%s);\n",
+ "%s subgroupBroadcast(%s, uint);\n",
+ "%s subgroupBroadcastFirst(%s);\n",
+ "%s subgroupShuffle(%s, uint);\n",
+ "%s subgroupShuffleXor(%s, uint);\n",
+ "%s subgroupShuffleUp(%s, uint delta);\n",
+ "%s subgroupShuffleDown(%s, uint delta);\n",
+ "%s subgroupAdd(%s);\n",
+ "%s subgroupMul(%s);\n",
+ "%s subgroupMin(%s);\n",
+ "%s subgroupMax(%s);\n",
+ "%s subgroupAnd(%s);\n",
+ "%s subgroupOr(%s);\n",
+ "%s subgroupXor(%s);\n",
+ "%s subgroupInclusiveAdd(%s);\n",
+ "%s subgroupInclusiveMul(%s);\n",
+ "%s subgroupInclusiveMin(%s);\n",
+ "%s subgroupInclusiveMax(%s);\n",
+ "%s subgroupInclusiveAnd(%s);\n",
+ "%s subgroupInclusiveOr(%s);\n",
+ "%s subgroupInclusiveXor(%s);\n",
+ "%s subgroupExclusiveAdd(%s);\n",
+ "%s subgroupExclusiveMul(%s);\n",
+ "%s subgroupExclusiveMin(%s);\n",
+ "%s subgroupExclusiveMax(%s);\n",
+ "%s subgroupExclusiveAnd(%s);\n",
+ "%s subgroupExclusiveOr(%s);\n",
+ "%s subgroupExclusiveXor(%s);\n",
+ "%s subgroupClusteredAdd(%s, uint);\n",
+ "%s subgroupClusteredMul(%s, uint);\n",
+ "%s subgroupClusteredMin(%s, uint);\n",
+ "%s subgroupClusteredMax(%s, uint);\n",
+ "%s subgroupClusteredAnd(%s, uint);\n",
+ "%s subgroupClusteredOr(%s, uint);\n",
+ "%s subgroupClusteredXor(%s, uint);\n",
+ "%s subgroupQuadBroadcast(%s, uint);\n",
+ "%s subgroupQuadSwapHorizontal(%s);\n",
+ "%s subgroupQuadSwapVertical(%s);\n",
+ "%s subgroupQuadSwapDiagonal(%s);\n",
+ "uvec4 subgroupPartitionNV(%s);\n",
+ "%s subgroupPartitionedAddNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedMulNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedMinNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedMaxNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedAndNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedOrNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedXorNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedInclusiveAddNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedInclusiveMulNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedInclusiveMinNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedInclusiveMaxNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedInclusiveAndNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedInclusiveOrNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedInclusiveXorNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedExclusiveAddNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedExclusiveMulNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedExclusiveMinNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedExclusiveMaxNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedExclusiveAndNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedExclusiveOrNV(%s, uvec4 ballot);\n",
+ "%s subgroupPartitionedExclusiveXorNV(%s, uvec4 ballot);\n",
+ };
+
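+ // Type tables the templates above are expanded against. Two-"%s" templates
+ // take the return and operand type; the single-"%s" templates (AllEqual,
+ // PartitionNV) take only the operand type, and snprintf simply ignores the
+ // excess argument.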
+ static const char *floatTypes[] = {
+ "float", "vec2", "vec3", "vec4",
+ "float16_t", "f16vec2", "f16vec3", "f16vec4",
+ };
+ static const char *doubleTypes[] = {
+ "double", "dvec2", "dvec3", "dvec4",
+ };
+ static const char *intTypes[] = {
+ "int8_t", "i8vec2", "i8vec3", "i8vec4",
+ "int16_t", "i16vec2", "i16vec3", "i16vec4",
+ "int", "ivec2", "ivec3", "ivec4",
+ "int64_t", "i64vec2", "i64vec3", "i64vec4",
+ "uint8_t", "u8vec2", "u8vec3", "u8vec4",
+ "uint16_t", "u16vec2", "u16vec3", "u16vec4",
+ "uint", "uvec2", "uvec3", "uvec4",
+ "uint64_t", "u64vec2", "u64vec3", "u64vec4",
+ };
+ static const char *boolTypes[] = {
+ "bool", "bvec2", "bvec3", "bvec4",
+ };
+
+ for (size_t i = 0; i < sizeof(subgroupOps)/sizeof(subgroupOps[0]); ++i) {
+ const char *op = subgroupOps[i];
+
+ // Logical ops have no floating-point overloads; ShuffleXor is a shuffle,
+ // not a logical op, so it is deliberately not matched here.
+ bool logicalOp = strstr(op, "Or") || strstr(op, "And") ||
+ (strstr(op, "Xor") && !strstr(op, "ShuffleXor"));
+ // Arithmetic ops (Add/Mul/Min/Max) have no bool overloads.
+ bool mathOp = strstr(op, "Add") || strstr(op, "Mul") || strstr(op, "Min") || strstr(op, "Max");
+
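+ // 256 bytes comfortably exceeds the longest generated declaration
+ // (the partitioned-inclusive NV ops run to roughly 70 characters).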
+ const int bufSize = 256;
+ char buf[bufSize];
+
+ if (!logicalOp) {
+ for (size_t j = 0; j < sizeof(floatTypes)/sizeof(floatTypes[0]); ++j) {
+ snprintf(buf, bufSize, op, floatTypes[j], floatTypes[j]);
+ commonBuiltins.append(buf);
+ }
+ if (profile != EEsProfile && version >= 400) {
+ for (size_t j = 0; j < sizeof(doubleTypes)/sizeof(doubleTypes[0]); ++j) {
+ snprintf(buf, bufSize, op, doubleTypes[j], doubleTypes[j]);
+ commonBuiltins.append(buf);
+ }
+ }
+ }
+ if (!mathOp) {
+ for (size_t j = 0; j < sizeof(boolTypes)/sizeof(boolTypes[0]); ++j) {
+ snprintf(buf, bufSize, op, boolTypes[j], boolTypes[j]);
+ commonBuiltins.append(buf);
+ }
+ }
+ for (size_t j = 0; j < sizeof(intTypes)/sizeof(intTypes[0]); ++j) {
+ snprintf(buf, bufSize, op, intTypes[j], intTypes[j]);
+ commonBuiltins.append(buf);
}
+ }
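+ // For example, "%s subgroupClusteredAnd(%s, uint);\n" is a logical op, so
+ // it expands only over the integer and bool tables, yielding declarations
+ // such as "uvec3 subgroupClusteredAnd(uvec3, uint);\n".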
stageBuiltins[EShLangCompute].append(
"void subgroupMemoryBarrierShared();"
"\n"
);
-#ifdef NV_EXTENSIONS
stageBuiltins[EShLangMeshNV].append(
"void subgroupMemoryBarrierShared();"
"\n"
@@ -2961,7 +1947,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"void subgroupMemoryBarrierShared();"
"\n"
);
-#endif
}
if (profile != EEsProfile && version >= 460) {
@@ -2973,7 +1958,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
-#ifdef AMD_EXTENSIONS
// GL_AMD_shader_ballot
if (profile != EEsProfile && version >= 450) {
commonBuiltins.append(
@@ -3838,12 +2822,13 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
}
// GL_AMD_gcn_shader
- if (profile != EEsProfile && version >= 450) {
+ if (profile != EEsProfile && version >= 440) {
commonBuiltins.append(
"float cubeFaceIndexAMD(vec3);"
"vec2 cubeFaceCoordAMD(vec3);"
"uint64_t timeAMD();"
+ "in int gl_SIMDGroupSizeAMD;"
"\n");
}
@@ -3869,11 +2854,182 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
-#endif // AMD_EXTENSIONS
-
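+ // GL_INTEL_shader_integer_functions2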
+ if ((profile != EEsProfile && version >= 130) ||
+ (profile == EEsProfile && version >= 300)) {
+ commonBuiltins.append(
+ "uint countLeadingZeros(uint);"
+ "uvec2 countLeadingZeros(uvec2);"
+ "uvec3 countLeadingZeros(uvec3);"
+ "uvec4 countLeadingZeros(uvec4);"
+
+ "uint countTrailingZeros(uint);"
+ "uvec2 countTrailingZeros(uvec2);"
+ "uvec3 countTrailingZeros(uvec3);"
+ "uvec4 countTrailingZeros(uvec4);"
+
+ "uint absoluteDifference(int, int);"
+ "uvec2 absoluteDifference(ivec2, ivec2);"
+ "uvec3 absoluteDifference(ivec3, ivec3);"
+ "uvec4 absoluteDifference(ivec4, ivec4);"
+
+ "uint16_t absoluteDifference(int16_t, int16_t);"
+ "u16vec2 absoluteDifference(i16vec2, i16vec2);"
+ "u16vec3 absoluteDifference(i16vec3, i16vec3);"
+ "u16vec4 absoluteDifference(i16vec4, i16vec4);"
+
+ "uint64_t absoluteDifference(int64_t, int64_t);"
+ "u64vec2 absoluteDifference(i64vec2, i64vec2);"
+ "u64vec3 absoluteDifference(i64vec3, i64vec3);"
+ "u64vec4 absoluteDifference(i64vec4, i64vec4);"
+
+ "uint absoluteDifference(uint, uint);"
+ "uvec2 absoluteDifference(uvec2, uvec2);"
+ "uvec3 absoluteDifference(uvec3, uvec3);"
+ "uvec4 absoluteDifference(uvec4, uvec4);"
+
+ "uint16_t absoluteDifference(uint16_t, uint16_t);"
+ "u16vec2 absoluteDifference(u16vec2, u16vec2);"
+ "u16vec3 absoluteDifference(u16vec3, u16vec3);"
+ "u16vec4 absoluteDifference(u16vec4, u16vec4);"
+
+ "uint64_t absoluteDifference(uint64_t, uint64_t);"
+ "u64vec2 absoluteDifference(u64vec2, u64vec2);"
+ "u64vec3 absoluteDifference(u64vec3, u64vec3);"
+ "u64vec4 absoluteDifference(u64vec4, u64vec4);"
+
+ "int addSaturate(int, int);"
+ "ivec2 addSaturate(ivec2, ivec2);"
+ "ivec3 addSaturate(ivec3, ivec3);"
+ "ivec4 addSaturate(ivec4, ivec4);"
+
+ "int16_t addSaturate(int16_t, int16_t);"
+ "i16vec2 addSaturate(i16vec2, i16vec2);"
+ "i16vec3 addSaturate(i16vec3, i16vec3);"
+ "i16vec4 addSaturate(i16vec4, i16vec4);"
+
+ "int64_t addSaturate(int64_t, int64_t);"
+ "i64vec2 addSaturate(i64vec2, i64vec2);"
+ "i64vec3 addSaturate(i64vec3, i64vec3);"
+ "i64vec4 addSaturate(i64vec4, i64vec4);"
+
+ "uint addSaturate(uint, uint);"
+ "uvec2 addSaturate(uvec2, uvec2);"
+ "uvec3 addSaturate(uvec3, uvec3);"
+ "uvec4 addSaturate(uvec4, uvec4);"
+
+ "uint16_t addSaturate(uint16_t, uint16_t);"
+ "u16vec2 addSaturate(u16vec2, u16vec2);"
+ "u16vec3 addSaturate(u16vec3, u16vec3);"
+ "u16vec4 addSaturate(u16vec4, u16vec4);"
+
+ "uint64_t addSaturate(uint64_t, uint64_t);"
+ "u64vec2 addSaturate(u64vec2, u64vec2);"
+ "u64vec3 addSaturate(u64vec3, u64vec3);"
+ "u64vec4 addSaturate(u64vec4, u64vec4);"
+
+ "int subtractSaturate(int, int);"
+ "ivec2 subtractSaturate(ivec2, ivec2);"
+ "ivec3 subtractSaturate(ivec3, ivec3);"
+ "ivec4 subtractSaturate(ivec4, ivec4);"
+
+ "int16_t subtractSaturate(int16_t, int16_t);"
+ "i16vec2 subtractSaturate(i16vec2, i16vec2);"
+ "i16vec3 subtractSaturate(i16vec3, i16vec3);"
+ "i16vec4 subtractSaturate(i16vec4, i16vec4);"
+
+ "int64_t subtractSaturate(int64_t, int64_t);"
+ "i64vec2 subtractSaturate(i64vec2, i64vec2);"
+ "i64vec3 subtractSaturate(i64vec3, i64vec3);"
+ "i64vec4 subtractSaturate(i64vec4, i64vec4);"
+
+ "uint subtractSaturate(uint, uint);"
+ "uvec2 subtractSaturate(uvec2, uvec2);"
+ "uvec3 subtractSaturate(uvec3, uvec3);"
+ "uvec4 subtractSaturate(uvec4, uvec4);"
+
+ "uint16_t subtractSaturate(uint16_t, uint16_t);"
+ "u16vec2 subtractSaturate(u16vec2, u16vec2);"
+ "u16vec3 subtractSaturate(u16vec3, u16vec3);"
+ "u16vec4 subtractSaturate(u16vec4, u16vec4);"
+
+ "uint64_t subtractSaturate(uint64_t, uint64_t);"
+ "u64vec2 subtractSaturate(u64vec2, u64vec2);"
+ "u64vec3 subtractSaturate(u64vec3, u64vec3);"
+ "u64vec4 subtractSaturate(u64vec4, u64vec4);"
+
+ "int average(int, int);"
+ "ivec2 average(ivec2, ivec2);"
+ "ivec3 average(ivec3, ivec3);"
+ "ivec4 average(ivec4, ivec4);"
+
+ "int16_t average(int16_t, int16_t);"
+ "i16vec2 average(i16vec2, i16vec2);"
+ "i16vec3 average(i16vec3, i16vec3);"
+ "i16vec4 average(i16vec4, i16vec4);"
+
+ "int64_t average(int64_t, int64_t);"
+ "i64vec2 average(i64vec2, i64vec2);"
+ "i64vec3 average(i64vec3, i64vec3);"
+ "i64vec4 average(i64vec4, i64vec4);"
+
+ "uint average(uint, uint);"
+ "uvec2 average(uvec2, uvec2);"
+ "uvec3 average(uvec3, uvec3);"
+ "uvec4 average(uvec4, uvec4);"
+
+ "uint16_t average(uint16_t, uint16_t);"
+ "u16vec2 average(u16vec2, u16vec2);"
+ "u16vec3 average(u16vec3, u16vec3);"
+ "u16vec4 average(u16vec4, u16vec4);"
+
+ "uint64_t average(uint64_t, uint64_t);"
+ "u64vec2 average(u64vec2, u64vec2);"
+ "u64vec3 average(u64vec3, u64vec3);"
+ "u64vec4 average(u64vec4, u64vec4);"
+
+ "int averageRounded(int, int);"
+ "ivec2 averageRounded(ivec2, ivec2);"
+ "ivec3 averageRounded(ivec3, ivec3);"
+ "ivec4 averageRounded(ivec4, ivec4);"
+
+ "int16_t averageRounded(int16_t, int16_t);"
+ "i16vec2 averageRounded(i16vec2, i16vec2);"
+ "i16vec3 averageRounded(i16vec3, i16vec3);"
+ "i16vec4 averageRounded(i16vec4, i16vec4);"
+
+ "int64_t averageRounded(int64_t, int64_t);"
+ "i64vec2 averageRounded(i64vec2, i64vec2);"
+ "i64vec3 averageRounded(i64vec3, i64vec3);"
+ "i64vec4 averageRounded(i64vec4, i64vec4);"
+
+ "uint averageRounded(uint, uint);"
+ "uvec2 averageRounded(uvec2, uvec2);"
+ "uvec3 averageRounded(uvec3, uvec3);"
+ "uvec4 averageRounded(uvec4, uvec4);"
+
+ "uint16_t averageRounded(uint16_t, uint16_t);"
+ "u16vec2 averageRounded(u16vec2, u16vec2);"
+ "u16vec3 averageRounded(u16vec3, u16vec3);"
+ "u16vec4 averageRounded(u16vec4, u16vec4);"
+
+ "uint64_t averageRounded(uint64_t, uint64_t);"
+ "u64vec2 averageRounded(u64vec2, u64vec2);"
+ "u64vec3 averageRounded(u64vec3, u64vec3);"
+ "u64vec4 averageRounded(u64vec4, u64vec4);"
+
+ "int multiply32x16(int, int);"
+ "ivec2 multiply32x16(ivec2, ivec2);"
+ "ivec3 multiply32x16(ivec3, ivec3);"
+ "ivec4 multiply32x16(ivec4, ivec4);"
+
+ "uint multiply32x16(uint, uint);"
+ "uvec2 multiply32x16(uvec2, uvec2);"
+ "uvec3 multiply32x16(uvec3, uvec3);"
+ "uvec4 multiply32x16(uvec4, uvec4);"
+ "\n");
+ }
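// Illustrative GLSL use of the GL_INTEL_shader_integer_functions2 built-ins
// declared above (a sketch, not text emitted by this table):
//
//   #version 450
//   #extension GL_INTEL_shader_integer_functions2 : require
//   void main() {
//       uint lz = countLeadingZeros(0x00ffu);   // 24
//       uint d  = absoluteDifference(-3, 5);    // 8; unsigned result avoids overflow
//       int  s  = addSaturate(0x7fffffff, 1);   // clamps at INT_MAX instead of wrapping
//   }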
-#ifdef NV_EXTENSIONS
- if ((profile != EEsProfile && version >= 450) ||
+ if ((profile != EEsProfile && version >= 450) ||
(profile == EEsProfile && version >= 320)) {
commonBuiltins.append(
"struct gl_TextureFootprint2DNV {"
@@ -3906,7 +3062,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
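// Usage sketch, assuming the textureFootprintNV signature from
// GL_NV_shader_texture_footprint:
//
//   #extension GL_NV_shader_texture_footprint : require
//   uniform sampler2D tex;
//   gl_TextureFootprint2DNV fp;
//   bool singleTexel = textureFootprintNV(tex, uv, 7, true, fp);  // granularity 7, coarse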
-#endif // NV_EXTENSIONS
// GL_AMD_gpu_shader_half_float/Explicit types
if (profile != EEsProfile && version >= 450) {
commonBuiltins.append(
@@ -4791,7 +3946,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
//
// Geometric Functions.
//
- if (IncludeLegacy(version, profile, spvVersion))
+ if (spvVersion.vulkan == 0 && IncludeLegacy(version, profile, spvVersion))
stageBuiltins[EShLangVertex].append("vec4 ftransform();");
//
@@ -4873,6 +4028,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"void EndPrimitive();"
"\n");
}
+#endif
//============================================================================
//
@@ -4888,7 +4044,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
stageBuiltins[EShLangCompute].append(
"void barrier();"
);
-#ifdef NV_EXTENSIONS
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
stageBuiltins[EShLangMeshNV].append(
"void barrier();"
@@ -4897,23 +4052,26 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"void barrier();"
);
}
-#endif
if ((profile != EEsProfile && version >= 130) || esBarrier)
commonBuiltins.append(
"void memoryBarrier();"
);
if ((profile != EEsProfile && version >= 420) || esBarrier) {
commonBuiltins.append(
- "void memoryBarrierAtomicCounter();"
"void memoryBarrierBuffer();"
- "void memoryBarrierImage();"
);
stageBuiltins[EShLangCompute].append(
"void memoryBarrierShared();"
"void groupMemoryBarrier();"
);
}
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
+ if ((profile != EEsProfile && version >= 420) || esBarrier) {
+ commonBuiltins.append(
+ "void memoryBarrierAtomicCounter();"
+ "void memoryBarrierImage();"
+ );
+ }
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
stageBuiltins[EShLangMeshNV].append(
"void memoryBarrierShared();"
@@ -4924,7 +4082,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"void groupMemoryBarrier();"
);
}
-#endif
commonBuiltins.append("void controlBarrier(int, int, int, int);\n"
"void memoryBarrier(int, int, int);\n");
@@ -4954,6 +4111,60 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"void coopMatStoreNV(fcoopmatNV m, volatile coherent uvec4[] buf, uint element, uint stride, bool colMajor);\n"
"fcoopmatNV coopMatMulAddNV(fcoopmatNV A, fcoopmatNV B, fcoopmatNV C);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent int8_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent int16_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent int[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent int64_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent ivec2[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent ivec4[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent uint8_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent uint16_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent uint[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent uint64_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent uvec2[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out icoopmatNV m, volatile coherent uvec4[] buf, uint element, uint stride, bool colMajor);\n"
+
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent int8_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent int16_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent int[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent int64_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent ivec2[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent ivec4[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent uint8_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent uint16_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent uint[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent uint64_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent uvec2[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatLoadNV(out ucoopmatNV m, volatile coherent uvec4[] buf, uint element, uint stride, bool colMajor);\n"
+
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent int8_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent int16_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent int[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent int64_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent ivec2[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent ivec4[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent uint8_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent uint16_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent uint[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent uint64_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent uvec2[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(icoopmatNV m, volatile coherent uvec4[] buf, uint element, uint stride, bool colMajor);\n"
+
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent int8_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent int16_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent int[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent int64_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent ivec2[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent ivec4[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent uint8_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent uint16_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent uint[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent uint64_t[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent uvec2[] buf, uint element, uint stride, bool colMajor);\n"
+ "void coopMatStoreNV(ucoopmatNV m, volatile coherent uvec4[] buf, uint element, uint stride, bool colMajor);\n"
+
+ "icoopmatNV coopMatMulAddNV(icoopmatNV A, icoopmatNV B, icoopmatNV C);\n"
+ "ucoopmatNV coopMatMulAddNV(ucoopmatNV A, ucoopmatNV B, ucoopmatNV C);\n"
);
}
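// A sketch of the integer cooperative-matrix overloads added above, with type
// parameters as in GL_NV_cooperative_matrix / GL_NV_integer_cooperative_matrix:
//
//   #extension GL_NV_integer_cooperative_matrix : enable
//   layout(set = 0, binding = 0) buffer Buf { int8_t data[]; } buf;
//   icoopmatNV<8,  gl_ScopeSubgroup, 16, 16> A, B;
//   icoopmatNV<32, gl_ScopeSubgroup, 16, 16> C;
//   coopMatLoadNV(A, buf.data, 0, 16, false);  // element 0, stride 16, row-major
//   C = coopMatMulAddNV(A, B, C);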
@@ -4999,9 +4210,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
- stageBuiltins[EShLangFragment].append(derivatives);
- stageBuiltins[EShLangFragment].append("\n");
-
// GL_ARB_derivative_control
if (profile != EEsProfile && version >= 400) {
stageBuiltins[EShLangFragment].append(derivativeControls);
@@ -5030,7 +4238,14 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
-#ifdef AMD_EXTENSIONS
+ stageBuiltins[EShLangFragment].append(
+ "void beginInvocationInterlockARB(void);"
+ "void endInvocationInterlockARB(void);");
+
+ stageBuiltins[EShLangFragment].append(
+ "bool helperInvocationEXT();"
+ "\n");
+
// GL_AMD_shader_explicit_vertex_parameter
if (profile != EEsProfile && version >= 450) {
stageBuiltins[EShLangFragment].append(
@@ -5081,6 +4296,16 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
+ // GL_ARB_shader_clock & GL_EXT_shader_realtime_clock
+ if (profile != EEsProfile && version >= 450) {
+ commonBuiltins.append(
+ "uvec2 clock2x32ARB();"
+ "uint64_t clockARB();"
+ "uvec2 clockRealtime2x32EXT();"
+ "uint64_t clockRealtimeEXT();"
+ "\n");
+ }
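// For example (uint64_t additionally requires GL_ARB_gpu_shader_int64):
//
//   #extension GL_ARB_shader_clock : enable
//   #extension GL_EXT_shader_realtime_clock : enable
//   uint64_t t0   = clockARB();          // per-processor execution clock
//   uint64_t rt   = clockRealtimeEXT();  // device-wide real-time clock
//   uvec2    lohi = clock2x32ARB();      // same counter as two 32-bit halves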
+
// GL_AMD_shader_fragment_mask
if (profile != EEsProfile && version >= 450 && spvVersion.vulkan > 0) {
stageBuiltins[EShLangFragment].append(
@@ -5094,9 +4319,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
-#endif
-
-#ifdef NV_EXTENSIONS
// Builtins for GL_NV_ray_tracing
if (profile != EEsProfile && version >= 460) {
@@ -5125,14 +4347,11 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
}
// E_SPV_NV_compute_shader_derivatives
-
- stageBuiltins[EShLangCompute].append(derivatives);
- stageBuiltins[EShLangCompute].append(derivativeControls);
- stageBuiltins[EShLangCompute].append("\n");
-
-
+ if ((profile == EEsProfile && version >= 320) || (profile != EEsProfile && version >= 450)) {
+ stageBuiltins[EShLangCompute].append(derivativeControls);
+ stageBuiltins[EShLangCompute].append("\n");
+ }
if (profile != EEsProfile && version >= 450) {
-
stageBuiltins[EShLangCompute].append(derivativesAndControl16bits);
stageBuiltins[EShLangCompute].append(derivativesAndControl64bits);
stageBuiltins[EShLangCompute].append("\n");
@@ -5142,7 +4361,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
stageBuiltins[EShLangMeshNV].append(
"void writePackedPrimitiveIndices4x8NV(uint, uint);"
- "\n");
+ "\n");
}
#endif
@@ -5166,11 +4385,13 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"highp float diff;" // f - n
);
} else {
+#ifndef GLSLANG_WEB
commonBuiltins.append(
"float near;" // n
"float far;" // f
"float diff;" // f - n
);
+#endif
}
commonBuiltins.append(
@@ -5179,6 +4400,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
+#ifndef GLSLANG_WEB
if (spvVersion.spv == 0 && IncludeLegacy(version, profile, spvVersion)) {
//
// Matrix state. p. 31, 32, 37, 39, 40.
@@ -5296,6 +4518,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
+#endif
//============================================================================
//
@@ -5325,7 +4548,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
//============================================================================
//
// Define the interface to the mesh/task shader.
@@ -5413,7 +4636,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"\n");
}
}
-#endif
//============================================================================
//
@@ -5547,7 +4769,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
);
}
-#ifdef NV_EXTENSIONS
if (version >= 450)
stageBuiltins[EShLangVertex].append(
"out int gl_ViewportMask[];" // GL_NV_viewport_array2
@@ -5556,8 +4777,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"out vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes
"out int gl_ViewportMaskPerViewNV[];" // GL_NVX_multiview_per_view_attributes
);
-#endif
-
} else {
// ES profile
if (version == 100) {
@@ -5572,15 +4791,19 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in highp int gl_InstanceID;" // needs qualifier fixed later
);
if (spvVersion.vulkan > 0)
+#endif
stageBuiltins[EShLangVertex].append(
"in highp int gl_VertexIndex;"
"in highp int gl_InstanceIndex;"
);
+#ifndef GLSLANG_WEB
if (version < 310)
+#endif
stageBuiltins[EShLangVertex].append(
"highp vec4 gl_Position;" // needs qualifier fixed later
"highp float gl_PointSize;" // needs qualifier fixed later
);
+#ifndef GLSLANG_WEB
else
stageBuiltins[EShLangVertex].append(
"out gl_PerVertex {"
@@ -5632,10 +4855,8 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
if (version >= 450)
stageBuiltins[EShLangGeometry].append(
"float gl_CullDistance[];"
-#ifdef NV_EXTENSIONS
"vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering
"vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes
-#endif
);
stageBuiltins[EShLangGeometry].append(
"} gl_in[];"
@@ -5681,7 +4902,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in int gl_InvocationID;"
);
-#ifdef NV_EXTENSIONS
if (version >= 450)
stageBuiltins[EShLangGeometry].append(
"out int gl_ViewportMask[];" // GL_NV_viewport_array2
@@ -5690,7 +4910,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"out vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes
"out int gl_ViewportMaskPerViewNV[];" // GL_NVX_multiview_per_view_attributes
);
-#endif
stageBuiltins[EShLangGeometry].append("\n");
} else if (profile == EEsProfile && version >= 310) {
@@ -5755,13 +4974,11 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
if (version >= 450)
stageBuiltins[EShLangTessControl].append(
"float gl_CullDistance[];"
-#ifdef NV_EXTENSIONS
"int gl_ViewportMask[];" // GL_NV_viewport_array2
"vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering
"int gl_SecondaryViewportMaskNV[];" // GL_NV_stereo_view_rendering
"vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes
"int gl_ViewportMaskPerViewNV[];" // GL_NVX_multiview_per_view_attributes
-#endif
);
stageBuiltins[EShLangTessControl].append(
"} gl_out[];"
@@ -5860,7 +5077,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"out int gl_Layer;"
"\n");
-#ifdef NV_EXTENSIONS
if (version >= 450)
stageBuiltins[EShLangTessEvaluation].append(
"out int gl_ViewportMask[];" // GL_NV_viewport_array2
@@ -5869,7 +5085,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"out vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes
"out int gl_ViewportMaskPerViewNV[];" // GL_NVX_multiview_per_view_attributes
);
-#endif
} else if (profile == EEsProfile && version >= 310) {
// Note: "in gl_PerVertex {...} gl_in[gl_MaxPatchVertices];" is declared in initialize() below,
@@ -5963,19 +5178,25 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"flat in int gl_PrimitiveID;"
);
- if (version >= 400) {
+ if (version >= 130) { // ARB_sample_shading
stageBuiltins[EShLangFragment].append(
"flat in int gl_SampleID;"
" in vec2 gl_SamplePosition;"
- "flat in int gl_SampleMaskIn[];"
" out int gl_SampleMask[];"
);
- if (spvVersion.spv == 0)
+
+ if (spvVersion.spv == 0) {
stageBuiltins[EShLangFragment].append(
"uniform int gl_NumSamples;"
- );
+ );
+ }
}
+ if (version >= 400)
+ stageBuiltins[EShLangFragment].append(
+ "flat in int gl_SampleMaskIn[];"
+ );
+
if (version >= 430)
stageBuiltins[EShLangFragment].append(
"flat in int gl_Layer;"
@@ -5994,7 +5215,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"flat in int gl_FragInvocationCountEXT;"
);
-#ifdef AMD_EXTENSIONS
if (version >= 450)
stageBuiltins[EShLangFragment].append(
"in vec2 gl_BaryCoordNoPerspAMD;"
@@ -6005,9 +5225,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in vec2 gl_BaryCoordSmoothSampleAMD;"
"in vec3 gl_BaryCoordPullModelAMD;"
);
-#endif
-#ifdef NV_EXTENSIONS
if (version >= 430)
stageBuiltins[EShLangFragment].append(
"in bool gl_FragFullyCoveredNV;"
@@ -6020,7 +5238,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in vec3 gl_BaryCoordNoPerspNV;"
);
-#endif
} else {
// ES profile
@@ -6032,6 +5249,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"mediump vec2 gl_PointCoord;" // needs qualifier fixed later
);
}
+#endif
if (version >= 300) {
stageBuiltins[EShLangFragment].append(
"highp vec4 gl_FragCoord;" // needs qualifier fixed later
@@ -6040,6 +5258,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"highp float gl_FragDepth;" // needs qualifier fixed later
);
}
+#ifndef GLSLANG_WEB
if (version >= 310) {
stageBuiltins[EShLangFragment].append(
"bool gl_HelperInvocation;" // needs qualifier fixed later
@@ -6067,7 +5286,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"flat in ivec2 gl_FragSizeEXT;"
"flat in int gl_FragInvocationCountEXT;"
);
-#ifdef NV_EXTENSIONS
if (version >= 320)
stageBuiltins[EShLangFragment].append( // GL_NV_shading_rate_image
"flat in ivec2 gl_FragmentSizeNV;"
@@ -6078,17 +5296,19 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in vec3 gl_BaryCoordNV;"
"in vec3 gl_BaryCoordNoPerspNV;"
);
+ }
#endif
- }
stageBuiltins[EShLangFragment].append("\n");
if (version >= 130)
add2ndGenerationSamplingImaging(version, profile, spvVersion);
+#ifndef GLSLANG_WEB
+
// GL_ARB_shader_ballot
if (profile != EEsProfile && version >= 450) {
- const char* ballotDecls =
+ const char* ballotDecls =
"uniform uint gl_SubGroupSizeARB;"
"in uint gl_SubGroupInvocationARB;"
"in uint64_t gl_SubGroupEqMaskARB;"
@@ -6097,7 +5317,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in uint64_t gl_SubGroupLeMaskARB;"
"in uint64_t gl_SubGroupLtMaskARB;"
"\n";
- const char* fragmentBallotDecls =
+ const char* fragmentBallotDecls =
"uniform uint gl_SubGroupSizeARB;"
"flat in uint gl_SubGroupInvocationARB;"
"flat in uint64_t gl_SubGroupEqMaskARB;"
@@ -6112,10 +5332,8 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
stageBuiltins[EShLangGeometry] .append(ballotDecls);
stageBuiltins[EShLangCompute] .append(ballotDecls);
stageBuiltins[EShLangFragment] .append(fragmentBallotDecls);
-#ifdef NV_EXTENSIONS
stageBuiltins[EShLangMeshNV] .append(ballotDecls);
stageBuiltins[EShLangTaskNV] .append(ballotDecls);
-#endif
}
if ((profile != EEsProfile && version >= 140) ||
@@ -6129,7 +5347,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
// GL_KHR_shader_subgroup
if ((profile == EEsProfile && version >= 310) ||
(profile != EEsProfile && version >= 140)) {
- const char* ballotDecls =
+ const char* subgroupDecls =
"in mediump uint gl_SubgroupSize;"
"in mediump uint gl_SubgroupInvocationID;"
"in highp uvec4 gl_SubgroupEqMask;"
@@ -6137,8 +5355,13 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"in highp uvec4 gl_SubgroupGtMask;"
"in highp uvec4 gl_SubgroupLeMask;"
"in highp uvec4 gl_SubgroupLtMask;"
+ // GL_NV_shader_sm_builtins
+ "in highp uint gl_WarpsPerSMNV;"
+ "in highp uint gl_SMCountNV;"
+ "in highp uint gl_WarpIDNV;"
+ "in highp uint gl_SMIDNV;"
"\n";
- const char* fragmentBallotDecls =
+ const char* fragmentSubgroupDecls =
"flat in mediump uint gl_SubgroupSize;"
"flat in mediump uint gl_SubgroupInvocationID;"
"flat in highp uvec4 gl_SubgroupEqMask;"
@@ -6146,35 +5369,36 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
"flat in highp uvec4 gl_SubgroupGtMask;"
"flat in highp uvec4 gl_SubgroupLeMask;"
"flat in highp uvec4 gl_SubgroupLtMask;"
+ // GL_NV_shader_sm_builtins
+ "flat in highp uint gl_WarpsPerSMNV;"
+ "flat in highp uint gl_SMCountNV;"
+ "flat in highp uint gl_WarpIDNV;"
+ "flat in highp uint gl_SMIDNV;"
+ "\n";
+ const char* computeSubgroupDecls =
+ "in highp uint gl_NumSubgroups;"
+ "in highp uint gl_SubgroupID;"
"\n";
- stageBuiltins[EShLangVertex] .append(ballotDecls);
- stageBuiltins[EShLangTessControl] .append(ballotDecls);
- stageBuiltins[EShLangTessEvaluation].append(ballotDecls);
- stageBuiltins[EShLangGeometry] .append(ballotDecls);
- stageBuiltins[EShLangCompute] .append(ballotDecls);
- stageBuiltins[EShLangFragment] .append(fragmentBallotDecls);
-#ifdef NV_EXTENSIONS
- stageBuiltins[EShLangMeshNV] .append(ballotDecls);
- stageBuiltins[EShLangTaskNV] .append(ballotDecls);
-#endif
- stageBuiltins[EShLangCompute].append(
- "highp in uint gl_NumSubgroups;"
- "highp in uint gl_SubgroupID;"
- "\n");
-#ifdef NV_EXTENSIONS
- stageBuiltins[EShLangMeshNV].append(
- "highp in uint gl_NumSubgroups;"
- "highp in uint gl_SubgroupID;"
- "\n");
- stageBuiltins[EShLangTaskNV].append(
- "highp in uint gl_NumSubgroups;"
- "highp in uint gl_SubgroupID;"
- "\n");
-#endif
+ stageBuiltins[EShLangVertex] .append(subgroupDecls);
+ stageBuiltins[EShLangTessControl] .append(subgroupDecls);
+ stageBuiltins[EShLangTessEvaluation].append(subgroupDecls);
+ stageBuiltins[EShLangGeometry] .append(subgroupDecls);
+ stageBuiltins[EShLangCompute] .append(subgroupDecls);
+ stageBuiltins[EShLangCompute] .append(computeSubgroupDecls);
+ stageBuiltins[EShLangFragment] .append(fragmentSubgroupDecls);
+ stageBuiltins[EShLangMeshNV] .append(subgroupDecls);
+ stageBuiltins[EShLangMeshNV] .append(computeSubgroupDecls);
+ stageBuiltins[EShLangTaskNV] .append(subgroupDecls);
+ stageBuiltins[EShLangTaskNV] .append(computeSubgroupDecls);
+ stageBuiltins[EShLangRayGenNV] .append(subgroupDecls);
+ stageBuiltins[EShLangIntersectNV] .append(subgroupDecls);
+ stageBuiltins[EShLangAnyHitNV] .append(subgroupDecls);
+ stageBuiltins[EShLangClosestHitNV] .append(subgroupDecls);
+ stageBuiltins[EShLangMissNV] .append(subgroupDecls);
+ stageBuiltins[EShLangCallableNV] .append(subgroupDecls);
}
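// The gl_WarpsPerSMNV/gl_SMCountNV/gl_WarpIDNV/gl_SMIDNV additions come from
// GL_NV_shader_sm_builtins; an illustrative use is tagging work by scheduling unit:
//
//   #extension GL_NV_shader_sm_builtins : require
//   uint slot = gl_SMIDNV * gl_WarpsPerSMNV + gl_WarpIDNV;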
-#ifdef NV_EXTENSIONS
// GL_NV_ray_tracing
if (profile != EEsProfile && version >= 460) {
@@ -6274,7 +5498,6 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
stageBuiltins[EShLangClosestHitNV].append(deviceIndex);
stageBuiltins[EShLangMissNV].append(deviceIndex);
}
-#endif
if (version >= 300 /* both ES and non-ES */) {
stageBuiltins[EShLangFragment].append(
@@ -6296,6 +5519,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
commonBuiltins.append("const int gl_SemanticsAcquireRelease = 0x8;\n");
commonBuiltins.append("const int gl_SemanticsMakeAvailable = 0x2000;\n");
commonBuiltins.append("const int gl_SemanticsMakeVisible = 0x4000;\n");
+ commonBuiltins.append("const int gl_SemanticsVolatile = 0x8000;\n");
commonBuiltins.append("const int gl_StorageSemanticsNone = 0x0;\n");
commonBuiltins.append("const int gl_StorageSemanticsBuffer = 0x40;\n");
@@ -6303,6 +5527,7 @@ void TBuiltIns::initialize(int version, EProfile profile, const SpvVersion& spvV
commonBuiltins.append("const int gl_StorageSemanticsImage = 0x800;\n");
commonBuiltins.append("const int gl_StorageSemanticsOutput = 0x1000;\n");
}
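// These constants feed the scoped atomic overloads of
// GL_KHR_memory_scope_semantics; sketch:
//
//   atomicStore(flag, 1u, gl_ScopeDevice, gl_StorageSemanticsBuffer, gl_SemanticsRelease);
//   uint v = atomicLoad(flag, gl_ScopeDevice, gl_StorageSemanticsBuffer, gl_SemanticsAcquire);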
+#endif
// printf("%s\n", commonBuiltins.c_str());
// printf("%s\n", stageBuiltins[EShLangFragment].c_str());
@@ -6318,19 +5543,27 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c
// In this function proper, enumerate the types, then calls the next set of functions
// to enumerate all the uses for that type.
//
-#ifdef AMD_EXTENSIONS
- TBasicType bTypes[4] = { EbtFloat, EbtFloat16, EbtInt, EbtUint };
+
+ // enumerate all the types
+#ifdef GLSLANG_WEB
+ const TBasicType bTypes[] = { EbtFloat, EbtInt, EbtUint };
+ bool skipBuffer = true;
+ bool skipCubeArrayed = true;
+ const int image = 0;
#else
- TBasicType bTypes[3] = { EbtFloat, EbtInt, EbtUint };
-#endif
+ const TBasicType bTypes[] = { EbtFloat, EbtInt, EbtUint, EbtFloat16 };
bool skipBuffer = (profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 140);
bool skipCubeArrayed = (profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 130);
-
- // enumerate all the types
- for (int image = 0; image <= 1; ++image) { // loop over "bool" image vs sampler
-
+ for (int image = 0; image <= 1; ++image) // loop over "bool" image vs sampler
+#endif
+ {
for (int shadow = 0; shadow <= 1; ++shadow) { // loop over "bool" shadow or not
- for (int ms = 0; ms <=1; ++ms) {
+#ifdef GLSLANG_WEB
+ const int ms = 0;
+#else
+ for (int ms = 0; ms <= 1; ++ms) // loop over "bool" multisample or not
+#endif
+ {
if ((ms || image) && shadow)
continue;
if (ms && profile != EEsProfile && version < 150)
@@ -6341,20 +5574,23 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c
continue;
for (int arrayed = 0; arrayed <= 1; ++arrayed) { // loop over "bool" arrayed or not
- for (int dim = Esd1D; dim < EsdNumDims; ++dim) { // 1D, 2D, ..., buffer
+#ifdef GLSLANG_WEB
+ for (int dim = Esd2D; dim <= EsdCube; ++dim) { // 2D, 3D, and Cube
+#else
+ for (int dim = Esd1D; dim < EsdNumDims; ++dim) { // 1D, ..., buffer, subpass
if (dim == EsdSubpass && spvVersion.vulkan == 0)
continue;
if (dim == EsdSubpass && (image || shadow || arrayed))
continue;
if ((dim == Esd1D || dim == EsdRect) && profile == EEsProfile)
continue;
if (dim != Esd2D && dim != EsdSubpass && ms)
continue;
- if ((dim == Esd3D || dim == EsdRect) && arrayed)
- continue;
- if (dim == Esd3D && shadow)
- continue;
- if (dim == EsdCube && arrayed && skipCubeArrayed)
- continue;
if (dim == EsdBuffer && skipBuffer)
continue;
@@ -6362,31 +5598,35 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c
continue;
if (ms && arrayed && profile == EEsProfile && version < 310)
continue;
-#ifdef AMD_EXTENSIONS
- for (int bType = 0; bType < 4; ++bType) { // float, float16, int, uint results
-
- if (shadow && bType > 1)
- continue;
+#endif
+ if (dim == Esd3D && shadow)
+ continue;
+ if (dim == EsdCube && arrayed && skipCubeArrayed)
+ continue;
+ if ((dim == Esd3D || dim == EsdRect) && arrayed)
+ continue;
- if (bTypes[bType] == EbtFloat16 && (profile == EEsProfile ||version < 450))
+ // Loop over the bTypes
+ for (size_t bType = 0; bType < sizeof(bTypes)/sizeof(TBasicType); ++bType) {
+#ifndef GLSLANG_WEB
+ if (bTypes[bType] == EbtFloat16 && (profile == EEsProfile || version < 450))
continue;
-#else
- for (int bType = 0; bType < 3; ++bType) { // float, int, uint results
-
- if (shadow && bType > 0)
+ if (dim == EsdRect && version < 140 && bType > 0)
continue;
#endif
- if (dim == EsdRect && version < 140 && bType > 0)
+ if (shadow && (bTypes[bType] == EbtInt || bTypes[bType] == EbtUint))
continue;
//
// Now, make all the function prototypes for the type we just built...
//
-
TSampler sampler;
+#ifndef GLSLANG_WEB
if (dim == EsdSubpass) {
sampler.setSubpass(bTypes[bType], ms ? true : false);
- } else if (image) {
+ } else
+#endif
+ if (image) {
sampler.setImage(bTypes[bType], (TSamplerDim)dim, arrayed ? true : false,
shadow ? true : false,
ms ? true : false);
@@ -6398,10 +5638,12 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c
TString typeName = sampler.getString();
+#ifndef GLSLANG_WEB
if (dim == EsdSubpass) {
addSubpassSampling(sampler, typeName, version, profile);
continue;
}
+#endif
addQueryFunctions(sampler, typeName, version, profile);
@@ -6409,8 +5651,8 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c
addImageFunctions(sampler, typeName, version, profile);
else {
addSamplingFunctions(sampler, typeName, version, profile);
+#ifndef GLSLANG_WEB
addGatherFunctions(sampler, typeName, version, profile);
-
if (spvVersion.vulkan > 0 && sampler.isCombined() && !sampler.shadow) {
// Base Vulkan allows texelFetch() for
// textureBuffer (i.e. without sampler).
@@ -6425,6 +5667,7 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c
addSamplingFunctions(sampler, textureTypeName, version, profile);
addQueryFunctions(sampler, textureTypeName, version, profile);
}
+#endif
}
}
}
@@ -6436,7 +5679,6 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c
//
// sparseTexelsResidentARB()
//
-
if (profile != EEsProfile && version >= 450) {
commonBuiltins.append("bool sparseTexelsResidentARB(int code);\n");
}
@@ -6450,14 +5692,25 @@ void TBuiltIns::add2ndGenerationSamplingImaging(int version, EProfile profile, c
//
void TBuiltIns::addQueryFunctions(TSampler sampler, const TString& typeName, int version, EProfile profile)
{
- if (sampler.image && ((profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 430)))
- return;
-
//
// textureSize() and imageSize()
//
int sizeDims = dimMap[sampler.dim] + (sampler.arrayed ? 1 : 0) - (sampler.dim == EsdCube ? 1 : 0);
+
+#ifdef GLSLANG_WEB
+ commonBuiltins.append("highp ");
+ commonBuiltins.append("ivec");
+ commonBuiltins.append(postfixes[sizeDims]);
+ commonBuiltins.append(" textureSize(");
+ commonBuiltins.append(typeName);
+ commonBuiltins.append(",int);\n");
+ return;
+#endif
+
+ if (sampler.isImage() && ((profile == EEsProfile && version < 310) || (profile != EEsProfile && version < 430)))
+ return;
+
if (profile == EEsProfile)
commonBuiltins.append("highp ");
if (sizeDims == 1)
@@ -6466,12 +5719,12 @@ void TBuiltIns::addQueryFunctions(TSampler sampler, const TString& typeName, int
commonBuiltins.append("ivec");
commonBuiltins.append(postfixes[sizeDims]);
}
- if (sampler.image)
+ if (sampler.isImage())
commonBuiltins.append(" imageSize(readonly writeonly volatile coherent ");
else
commonBuiltins.append(" textureSize(");
commonBuiltins.append(typeName);
- if (! sampler.image && sampler.dim != EsdRect && sampler.dim != EsdBuffer && ! sampler.ms)
+ if (! sampler.isImage() && ! sampler.isRect() && ! sampler.isBuffer() && ! sampler.isMultiSample())
commonBuiltins.append(",int);\n");
else
commonBuiltins.append(");\n");
@@ -6482,9 +5735,9 @@ void TBuiltIns::addQueryFunctions(TSampler sampler, const TString& typeName, int
// GL_ARB_shader_texture_image_samples
// TODO: spec issue? there are no memory qualifiers; how to query a writeonly/readonly image, etc?
- if (profile != EEsProfile && version >= 430 && sampler.ms) {
+ if (profile != EEsProfile && version >= 430 && sampler.isMultiSample()) {
commonBuiltins.append("int ");
- if (sampler.image)
+ if (sampler.isImage())
commonBuiltins.append("imageSamples(readonly writeonly volatile coherent ");
else
commonBuiltins.append("textureSamples(");
@@ -6496,40 +5749,28 @@ void TBuiltIns::addQueryFunctions(TSampler sampler, const TString& typeName, int
// textureQueryLod(), fragment stage only
//
- if (profile != EEsProfile && version >= 400 && sampler.combined && sampler.dim != EsdRect && ! sampler.ms && sampler.dim != EsdBuffer) {
-#ifdef AMD_EXTENSIONS
+ if (profile != EEsProfile && version >= 400 && sampler.isCombined() && sampler.dim != EsdRect &&
+ ! sampler.isMultiSample() && ! sampler.isBuffer()) {
for (int f16TexAddr = 0; f16TexAddr < 2; ++f16TexAddr) {
if (f16TexAddr && sampler.type != EbtFloat16)
continue;
-#endif
stageBuiltins[EShLangFragment].append("vec2 textureQueryLod(");
stageBuiltins[EShLangFragment].append(typeName);
if (dimMap[sampler.dim] == 1)
-#ifdef AMD_EXTENSIONS
if (f16TexAddr)
stageBuiltins[EShLangFragment].append(", float16_t");
else
stageBuiltins[EShLangFragment].append(", float");
-#else
- stageBuiltins[EShLangFragment].append(", float");
-#endif
else {
-#ifdef AMD_EXTENSIONS
if (f16TexAddr)
stageBuiltins[EShLangFragment].append(", f16vec");
else
stageBuiltins[EShLangFragment].append(", vec");
-#else
- stageBuiltins[EShLangFragment].append(", vec");
-#endif
stageBuiltins[EShLangFragment].append(postfixes[dimMap[sampler.dim]]);
}
stageBuiltins[EShLangFragment].append(");\n");
-#ifdef AMD_EXTENSIONS
}
-#endif
-#ifdef NV_EXTENSIONS
stageBuiltins[EShLangCompute].append("vec2 textureQueryLod(");
stageBuiltins[EShLangCompute].append(typeName);
if (dimMap[sampler.dim] == 1)
@@ -6539,14 +5780,14 @@ void TBuiltIns::addQueryFunctions(TSampler sampler, const TString& typeName, int
stageBuiltins[EShLangCompute].append(postfixes[dimMap[sampler.dim]]);
}
stageBuiltins[EShLangCompute].append(");\n");
-#endif
}
//
// textureQueryLevels()
//
- if (profile != EEsProfile && version >= 430 && ! sampler.image && sampler.dim != EsdRect && ! sampler.ms && sampler.dim != EsdBuffer) {
+ if (profile != EEsProfile && version >= 430 && ! sampler.isImage() && sampler.dim != EsdRect &&
+ ! sampler.isMultiSample() && ! sampler.isBuffer()) {
commonBuiltins.append("int textureQueryLevels(");
commonBuiltins.append(typeName);
commonBuiltins.append(");\n");
@@ -6573,7 +5814,7 @@ void TBuiltIns::addImageFunctions(TSampler sampler, const TString& typeName, int
imageParams.append(", ivec");
imageParams.append(postfixes[dims]);
}
- if (sampler.ms)
+ if (sampler.isMultiSample())
imageParams.append(", int");
if (profile == EEsProfile)
@@ -6589,7 +5830,7 @@ void TBuiltIns::addImageFunctions(TSampler sampler, const TString& typeName, int
commonBuiltins.append(prefixes[sampler.type]);
commonBuiltins.append("vec4);\n");
- if (sampler.dim != Esd1D && sampler.dim != EsdBuffer && profile != EEsProfile && version >= 450) {
+ if (! sampler.is1D() && ! sampler.isBuffer() && profile != EEsProfile && version >= 450) {
commonBuiltins.append("int sparseImageLoadARB(readonly volatile coherent ");
commonBuiltins.append(imageParams);
commonBuiltins.append(", out ");
@@ -6666,8 +5907,7 @@ void TBuiltIns::addImageFunctions(TSampler sampler, const TString& typeName, int
}
}
-#ifdef AMD_EXTENSIONS
- if (sampler.dim == EsdRect || sampler.dim == EsdBuffer || sampler.shadow || sampler.ms)
+ if (sampler.dim == EsdRect || sampler.dim == EsdBuffer || sampler.shadow || sampler.isMultiSample())
return;
if (profile == EEsProfile || version < 450)
@@ -6693,7 +5933,7 @@ void TBuiltIns::addImageFunctions(TSampler sampler, const TString& typeName, int
commonBuiltins.append(prefixes[sampler.type]);
commonBuiltins.append("vec4);\n");
- if (sampler.dim != Esd1D) {
+ if (! sampler.is1D()) {
commonBuiltins.append("int sparseImageLoadLodAMD(readonly volatile coherent ");
commonBuiltins.append(imageLodParams);
commonBuiltins.append(", out ");
@@ -6701,7 +5941,6 @@ void TBuiltIns::addImageFunctions(TSampler sampler, const TString& typeName, int
commonBuiltins.append("vec4");
commonBuiltins.append(");\n");
}
-#endif
}
//
@@ -6716,7 +5955,7 @@ void TBuiltIns::addSubpassSampling(TSampler sampler, const TString& typeName, in
stageBuiltins[EShLangFragment].append("vec4 subpassLoad");
stageBuiltins[EShLangFragment].append("(");
stageBuiltins[EShLangFragment].append(typeName.c_str());
- if (sampler.ms)
+ if (sampler.isMultiSample())
stageBuiltins[EShLangFragment].append(", int");
stageBuiltins[EShLangFragment].append(");\n");
}
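// Vulkan GLSL usage sketch for the prototypes built here:
//
//   layout(input_attachment_index = 0, set = 0, binding = 0)
//       uniform subpassInput inColor;      // vec4 c = subpassLoad(inColor);
//   layout(input_attachment_index = 1, set = 0, binding = 1)
//       uniform subpassInputMS inMS;       // MS form adds a sample index:
//                                          // vec4 m = subpassLoad(inMS, 3);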
@@ -6729,17 +5968,23 @@ void TBuiltIns::addSubpassSampling(TSampler sampler, const TString& typeName, in
//
void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName, int version, EProfile profile)
{
+#ifdef GLSLANG_WEB
+ profile = EEsProfile;
+ version = 310;
+#endif
+
//
// texturing
//
for (int proj = 0; proj <= 1; ++proj) { // loop over "bool" projective or not
- if (proj && (sampler.dim == EsdCube || sampler.dim == EsdBuffer || sampler.arrayed || sampler.ms || !sampler.combined))
+ if (proj && (sampler.dim == EsdCube || sampler.isBuffer() || sampler.arrayed || sampler.isMultiSample()
+ || !sampler.isCombined()))
continue;
for (int lod = 0; lod <= 1; ++lod) {
- if (lod && (sampler.dim == EsdBuffer || sampler.dim == EsdRect || sampler.ms || !sampler.combined))
+ if (lod && (sampler.isBuffer() || sampler.isRect() || sampler.isMultiSample() || !sampler.isCombined()))
continue;
if (lod && sampler.dim == Esd2D && sampler.arrayed && sampler.shadow)
continue;
@@ -6748,18 +5993,18 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
for (int bias = 0; bias <= 1; ++bias) {
- if (bias && (lod || sampler.ms || !sampler.combined))
+ if (bias && (lod || sampler.isMultiSample() || !sampler.isCombined()))
continue;
if (bias && (sampler.dim == Esd2D || sampler.dim == EsdCube) && sampler.shadow && sampler.arrayed)
continue;
- if (bias && (sampler.dim == EsdRect || sampler.dim == EsdBuffer))
+ if (bias && (sampler.isRect() || sampler.isBuffer()))
continue;
for (int offset = 0; offset <= 1; ++offset) { // loop over "bool" offset or not
if (proj + offset + bias + lod > 3)
continue;
- if (offset && (sampler.dim == EsdCube || sampler.dim == EsdBuffer || sampler.ms))
+ if (offset && (sampler.dim == EsdCube || sampler.isBuffer() || sampler.isMultiSample()))
continue;
for (int fetch = 0; fetch <= 1; ++fetch) { // loop over "bool" fetch or not
@@ -6770,14 +6015,15 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
continue;
if (fetch && (sampler.shadow || sampler.dim == EsdCube))
continue;
- if (fetch == 0 && (sampler.ms || sampler.dim == EsdBuffer || !sampler.combined))
+ if (fetch == 0 && (sampler.isMultiSample() || sampler.isBuffer()
+ || !sampler.isCombined()))
continue;
for (int grad = 0; grad <= 1; ++grad) { // loop over "bool" grad or not
- if (grad && (lod || bias || sampler.ms || !sampler.combined))
+ if (grad && (lod || bias || sampler.isMultiSample() || !sampler.isCombined()))
continue;
- if (grad && sampler.dim == EsdBuffer)
+ if (grad && sampler.isBuffer())
continue;
if (proj + offset + fetch + grad + bias + lod > 3)
continue;
@@ -6797,31 +6043,46 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
if (extraProj && ! proj)
continue;
- if (extraProj && (sampler.dim == Esd3D || sampler.shadow || !sampler.combined))
+ if (extraProj && (sampler.dim == Esd3D || sampler.shadow || !sampler.isCombined()))
continue;
-#ifdef AMD_EXTENSIONS
- for (int f16TexAddr = 0; f16TexAddr <= 1; ++f16TexAddr) { // loop over 16-bit floating-point texel addressing
+ // loop over 16-bit floating-point texel addressing
+#ifdef GLSLANG_WEB
+ const int f16TexAddr = 0;
+#else
+ for (int f16TexAddr = 0; f16TexAddr <= 1; ++f16TexAddr)
+#endif
+ {
if (f16TexAddr && sampler.type != EbtFloat16)
continue;
if (f16TexAddr && sampler.shadow && ! compare) {
compare = true; // compare argument is always present
totalDims--;
}
+ // loop over "bool" lod clamp
+#ifdef GLSLANG_WEB
+ const int lodClamp = 0;
+#else
+ for (int lodClamp = 0; lodClamp <= 1; ++lodClamp)
#endif
- for (int lodClamp = 0; lodClamp <= 1 ;++lodClamp) { // loop over "bool" lod clamp
-
+ {
if (lodClamp && (profile == EEsProfile || version < 450))
continue;
if (lodClamp && (proj || lod || fetch))
continue;
- for (int sparse = 0; sparse <= 1; ++sparse) { // loop over "bool" sparse or not
-
+ // loop over "bool" sparse or not
+#ifdef GLSLANG_WEB
+ const int sparse = 0;
+#else
+ for (int sparse = 0; sparse <= 1; ++sparse)
+#endif
+ {
if (sparse && (profile == EEsProfile || version < 450))
continue;
- // Sparse sampling is not for 1D/1D array texture, buffer texture, and projective texture
- if (sparse && (sampler.dim == Esd1D || sampler.dim == EsdBuffer || proj))
+ // Sparse sampling is not for 1D/1D array texture, buffer texture, and
+ // projective texture
+ if (sparse && (sampler.is1D() || sampler.isBuffer() || proj))
continue;
TString s;
@@ -6831,14 +6092,10 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
s.append("int ");
else {
if (sampler.shadow)
-#ifdef AMD_EXTENSIONS
if (sampler.type == EbtFloat16)
s.append("float16_t ");
else
s.append("float ");
-#else
- s.append("float ");
-#endif
else {
s.append(prefixes[sampler.type]);
s.append("vec4 ");
@@ -6876,7 +6133,6 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
// sampler type
s.append(typeName);
-#ifdef AMD_EXTENSIONS
// P coordinate
if (extraProj) {
if (f16TexAddr)
@@ -6894,31 +6150,15 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
s.append(postfixes[totalDims]);
}
}
-#else
- // P coordinate
- if (extraProj)
- s.append(",vec4");
- else {
- s.append(",");
- TBasicType t = fetch ? EbtInt : EbtFloat;
- if (totalDims == 1)
- s.append(TType::getBasicString(t));
- else {
- s.append(prefixes[t]);
- s.append("vec");
- s.append(postfixes[totalDims]);
- }
- }
-#endif
// non-optional compare
if (compare)
s.append(",float");
// non-optional lod argument (lod that's not driven by lod loop) or sample
- if ((fetch && sampler.dim != EsdBuffer && sampler.dim != EsdRect && !sampler.ms) ||
- (sampler.ms && fetch))
+ if ((fetch && !sampler.isBuffer() &&
+ !sampler.isRect() && !sampler.isMultiSample())
+ || (sampler.isMultiSample() && fetch))
s.append(",int");
-#ifdef AMD_EXTENSIONS
// non-optional lod
if (lod) {
if (f16TexAddr)
@@ -6947,23 +6187,6 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
s.append(postfixes[dimMap[sampler.dim]]);
}
}
-#else
- // non-optional lod
- if (lod)
- s.append(",float");
-
- // gradient arguments
- if (grad) {
- if (dimMap[sampler.dim] == 1)
- s.append(",float,float");
- else {
- s.append(",vec");
- s.append(postfixes[dimMap[sampler.dim]]);
- s.append(",vec");
- s.append(postfixes[dimMap[sampler.dim]]);
- }
- }
-#endif
// offset
if (offset) {
if (dimMap[sampler.dim] == 1)
@@ -6974,7 +6197,6 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
}
}
-#ifdef AMD_EXTENSIONS
// lod clamp
if (lodClamp) {
if (f16TexAddr)
@@ -6982,29 +6204,19 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
else
s.append(",float");
}
-#else
- // lod clamp
- if (lodClamp)
- s.append(",float");
-#endif
// texel out (for sparse texture)
if (sparse) {
s.append(",out ");
if (sampler.shadow)
-#ifdef AMD_EXTENSIONS
if (sampler.type == EbtFloat16)
s.append("float16_t");
else
s.append("float");
-#else
- s.append("float");
-#endif
else {
s.append(prefixes[sampler.type]);
s.append("vec4");
}
}
-#ifdef AMD_EXTENSIONS
// optional bias
if (bias) {
if (f16TexAddr)
@@ -7012,27 +6224,18 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
else
s.append(",float");
}
-#else
- // optional bias
- if (bias)
- s.append(",float");
-#endif
s.append(");\n");
// Add to the per-language set of built-ins
if (bias || lodClamp) {
stageBuiltins[EShLangFragment].append(s);
-#ifdef NV_EXTENSIONS
stageBuiltins[EShLangCompute].append(s);
-#endif
} else
commonBuiltins.append(s);
}
}
-#ifdef AMD_EXTENSIONS
}
-#endif
}
}
}
@@ -7050,6 +6253,11 @@ void TBuiltIns::addSamplingFunctions(TSampler sampler, const TString& typeName,
//
void TBuiltIns::addGatherFunctions(TSampler sampler, const TString& typeName, int version, EProfile profile)
{
+#ifdef GLSLANG_WEB
+ profile = EEsProfile;
+ version = 310;
+#endif
+
switch (sampler.dim) {
case Esd2D:
case EsdRect:
@@ -7059,18 +6267,16 @@ void TBuiltIns::addGatherFunctions(TSampler sampler, const TString& typeName, in
return;
}
- if (sampler.ms)
+ if (sampler.isMultiSample())
return;
if (version < 140 && sampler.dim == EsdRect && sampler.type != EbtFloat)
return;
-#ifdef AMD_EXTENSIONS
for (int f16TexAddr = 0; f16TexAddr <= 1; ++f16TexAddr) { // loop over 16-bit floating-point texel addressing
if (f16TexAddr && sampler.type != EbtFloat16)
continue;
-#endif
for (int offset = 0; offset < 3; ++offset) { // loop over three forms of offset in the call name: none, Offset, and Offsets
for (int comp = 0; comp < 2; ++comp) { // loop over presence of comp argument
@@ -7118,14 +6324,10 @@ void TBuiltIns::addGatherFunctions(TSampler sampler, const TString& typeName, in
s.append(typeName);
// P coordinate argument
-#ifdef AMD_EXTENSIONS
if (f16TexAddr)
s.append(",f16vec");
else
s.append(",vec");
-#else
- s.append(",vec");
-#endif
int totalDims = dimMap[sampler.dim] + (sampler.arrayed ? 1 : 0);
s.append(postfixes[totalDims]);
@@ -7153,14 +6355,11 @@ void TBuiltIns::addGatherFunctions(TSampler sampler, const TString& typeName, in
s.append(");\n");
commonBuiltins.append(s);
-#ifdef AMD_EXTENSIONS
}
-#endif
}
}
}
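// The gather prototypes generated above correspond to calls such as (sketch):
//
//   uniform sampler2D s;
//   vec4 g0 = textureGather(s, uv);      // component 0 of the 2x2 footprint
//   vec4 g1 = textureGather(s, uv, 1);   // explicit component select
//   const ivec2 offs[4] = ivec2[](ivec2(0), ivec2(1, 0), ivec2(0, 1), ivec2(1));
//   vec4 go = textureGatherOffsets(s, uv, offs);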
-#ifdef AMD_EXTENSIONS
if (sampler.dim == EsdRect || sampler.shadow)
return;
@@ -7286,7 +6485,6 @@ void TBuiltIns::addGatherFunctions(TSampler sampler, const TString& typeName, in
}
}
}
-#endif
}
//
@@ -7298,6 +6496,11 @@ void TBuiltIns::addGatherFunctions(TSampler sampler, const TString& typeName, in
//
void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language)
{
+#ifdef GLSLANG_WEB
+ version = 310;
+ profile = EEsProfile;
+#endif
+
//
// Initialize the context-dependent (resource-dependent) built-in strings for parsing.
//
@@ -7309,7 +6512,7 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
//============================================================================
TString& s = commonBuiltins;
- const int maxSize = 80;
+ const int maxSize = 200;
char builtInConstant[maxSize];
//
@@ -7355,6 +6558,7 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
s.append(builtInConstant);
}
+#ifndef GLSLANG_WEB
if (version >= 310) {
// geometry
@@ -7413,10 +6617,8 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
"in gl_PerVertex {"
"highp vec4 gl_Position;"
"highp float gl_PointSize;"
-#ifdef NV_EXTENSIONS
"highp vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering
"highp vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes
-#endif
"} gl_in[gl_MaxPatchVertices];"
"\n");
}
@@ -7425,6 +6627,14 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
} else {
// non-ES profile
+ if (version > 400) {
+ snprintf(builtInConstant, maxSize, "const int gl_MaxVertexUniformVectors = %d;", resources.maxVertexUniformVectors);
+ s.append(builtInConstant);
+
+ snprintf(builtInConstant, maxSize, "const int gl_MaxFragmentUniformVectors = %d;", resources.maxFragmentUniformVectors);
+ s.append(builtInConstant);
+ }
+
snprintf(builtInConstant, maxSize, "const int gl_MaxVertexAttribs = %d;", resources.maxVertexAttribs);
s.append(builtInConstant);
@@ -7603,10 +6813,8 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
if (profile != EEsProfile && version >= 450)
s.append(
"float gl_CullDistance[];"
-#ifdef NV_EXTENSIONS
"vec4 gl_SecondaryPositionNV;" // GL_NV_stereo_view_rendering
"vec4 gl_PositionPerViewNV[];" // GL_NVX_multiview_per_view_attributes
-#endif
);
s.append(
"} gl_in[gl_MaxPatchVertices];"
@@ -7640,8 +6848,29 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
snprintf(builtInConstant, maxSize, "const int gl_MaxTransformFeedbackInterleavedComponents = %d;", resources.maxTransformFeedbackInterleavedComponents);
s.append(builtInConstant);
}
+#endif
+ }
+
+ // compute
+ if ((profile == EEsProfile && version >= 310) || (profile != EEsProfile && version >= 420)) {
+ snprintf(builtInConstant, maxSize, "const ivec3 gl_MaxComputeWorkGroupCount = ivec3(%d,%d,%d);", resources.maxComputeWorkGroupCountX,
+ resources.maxComputeWorkGroupCountY,
+ resources.maxComputeWorkGroupCountZ);
+ s.append(builtInConstant);
+ snprintf(builtInConstant, maxSize, "const ivec3 gl_MaxComputeWorkGroupSize = ivec3(%d,%d,%d);", resources.maxComputeWorkGroupSizeX,
+ resources.maxComputeWorkGroupSizeY,
+ resources.maxComputeWorkGroupSizeZ);
+ s.append(builtInConstant);
+
+ snprintf(builtInConstant, maxSize, "const int gl_MaxComputeUniformComponents = %d;", resources.maxComputeUniformComponents);
+ s.append(builtInConstant);
+ snprintf(builtInConstant, maxSize, "const int gl_MaxComputeTextureImageUnits = %d;", resources.maxComputeTextureImageUnits);
+ s.append(builtInConstant);
+
+ s.append("\n");
}
+#ifndef GLSLANG_WEB
// images (some in compute below)
if ((profile == EEsProfile && version >= 310) ||
(profile != EEsProfile && version >= 130)) {
@@ -7657,6 +6886,18 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
s.append(builtInConstant);
}
+ // compute
+ if ((profile == EEsProfile && version >= 310) || (profile != EEsProfile && version >= 420)) {
+ snprintf(builtInConstant, maxSize, "const int gl_MaxComputeImageUniforms = %d;", resources.maxComputeImageUniforms);
+ s.append(builtInConstant);
+ snprintf(builtInConstant, maxSize, "const int gl_MaxComputeAtomicCounters = %d;", resources.maxComputeAtomicCounters);
+ s.append(builtInConstant);
+ snprintf(builtInConstant, maxSize, "const int gl_MaxComputeAtomicCounterBuffers = %d;", resources.maxComputeAtomicCounterBuffers);
+ s.append(builtInConstant);
+
+ s.append("\n");
+ }
+
// atomic counters (some in compute below)
if ((profile == EEsProfile && version >= 310) ||
(profile != EEsProfile && version >= 420)) {
@@ -7694,31 +6935,6 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
s.append("\n");
}
- // compute
- if ((profile == EEsProfile && version >= 310) || (profile != EEsProfile && version >= 420)) {
- snprintf(builtInConstant, maxSize, "const ivec3 gl_MaxComputeWorkGroupCount = ivec3(%d,%d,%d);", resources.maxComputeWorkGroupCountX,
- resources.maxComputeWorkGroupCountY,
- resources.maxComputeWorkGroupCountZ);
- s.append(builtInConstant);
- snprintf(builtInConstant, maxSize, "const ivec3 gl_MaxComputeWorkGroupSize = ivec3(%d,%d,%d);", resources.maxComputeWorkGroupSizeX,
- resources.maxComputeWorkGroupSizeY,
- resources.maxComputeWorkGroupSizeZ);
- s.append(builtInConstant);
-
- snprintf(builtInConstant, maxSize, "const int gl_MaxComputeUniformComponents = %d;", resources.maxComputeUniformComponents);
- s.append(builtInConstant);
- snprintf(builtInConstant, maxSize, "const int gl_MaxComputeTextureImageUnits = %d;", resources.maxComputeTextureImageUnits);
- s.append(builtInConstant);
- snprintf(builtInConstant, maxSize, "const int gl_MaxComputeImageUniforms = %d;", resources.maxComputeImageUniforms);
- s.append(builtInConstant);
- snprintf(builtInConstant, maxSize, "const int gl_MaxComputeAtomicCounters = %d;", resources.maxComputeAtomicCounters);
- s.append(builtInConstant);
- snprintf(builtInConstant, maxSize, "const int gl_MaxComputeAtomicCounterBuffers = %d;", resources.maxComputeAtomicCounterBuffers);
- s.append(builtInConstant);
-
- s.append("\n");
- }
-
// GL_ARB_cull_distance
if (profile != EEsProfile && version >= 450) {
snprintf(builtInConstant, maxSize, "const int gl_MaxCullDistances = %d;", resources.maxCullDistances);
@@ -7734,15 +6950,6 @@ void TBuiltIns::initialize(const TBuiltInResource &resources, int version, EProf
s.append(builtInConstant);
}
-#ifdef AMD_EXTENSIONS
- // GL_AMD_gcn_shader
- if (profile != EEsProfile && version >= 450) {
- snprintf(builtInConstant, maxSize, "const int gl_SIMDGroupSizeAMD = 64;");
- s.append(builtInConstant);
- }
-#endif
-
-#ifdef NV_EXTENSIONS
// SPV_NV_mesh_shader
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
snprintf(builtInConstant, maxSize, "const int gl_MaxMeshOutputVerticesNV = %d;", resources.maxMeshOutputVerticesNV);
@@ -7846,6 +7053,11 @@ static void BuiltInVariable(const char* blockName, const char* name, TBuiltInVar
//
void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable)
{
+#ifdef GLSLANG_WEB
+ version = 310;
+ profile = EEsProfile;
+#endif
+
//
// Tag built-in variables and functions with additional qualifier and extension information
// that cannot be declared with the text strings.
@@ -7860,6 +7072,17 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
switch(language) {
case EShLangVertex:
+ if (spvVersion.vulkan > 0) {
+ BuiltInVariable("gl_VertexIndex", EbvVertexIndex, symbolTable);
+ BuiltInVariable("gl_InstanceIndex", EbvInstanceIndex, symbolTable);
+ }
+
+#ifndef GLSLANG_WEB
+ if (spvVersion.vulkan == 0) {
+ SpecialQualifier("gl_VertexID", EvqVertexId, EbvVertexId, symbolTable);
+ SpecialQualifier("gl_InstanceID", EvqInstanceId, EbvInstanceId, symbolTable);
+ }
+
if (profile != EEsProfile) {
if (version >= 440) {
symbolTable.setVariableExtensions("gl_BaseVertexARB", 1, &E_GL_ARB_shader_draw_parameters);
@@ -7886,19 +7109,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("readInvocationARB", 1, &E_GL_ARB_shader_ballot);
symbolTable.setFunctionExtensions("readFirstInvocationARB", 1, &E_GL_ARB_shader_ballot);
- BuiltInVariable("gl_SubGroupInvocationARB", EbvSubGroupInvocation, symbolTable);
- BuiltInVariable("gl_SubGroupEqMaskARB", EbvSubGroupEqMask, symbolTable);
- BuiltInVariable("gl_SubGroupGeMaskARB", EbvSubGroupGeMask, symbolTable);
- BuiltInVariable("gl_SubGroupGtMaskARB", EbvSubGroupGtMask, symbolTable);
- BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
- BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
-
- if (spvVersion.vulkan > 0)
- // Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
- SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
- else
- BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
-
if (version >= 430) {
symbolTable.setFunctionExtensions("anyInvocationARB", 1, &E_GL_ARB_shader_group_vote);
symbolTable.setFunctionExtensions("allInvocationsARB", 1, &E_GL_ARB_shader_group_vote);
@@ -7906,7 +7116,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
}
}
-#ifdef AMD_EXTENSIONS
+
if (profile != EEsProfile) {
symbolTable.setFunctionExtensions("minInvocationsAMD", 1, &E_GL_AMD_shader_ballot);
symbolTable.setFunctionExtensions("maxInvocationsAMD", 1, &E_GL_AMD_shader_ballot);
@@ -7940,6 +7150,9 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
}
if (profile != EEsProfile) {
+ symbolTable.setVariableExtensions("gl_SIMDGroupSizeAMD", 1, &E_GL_AMD_gcn_shader);
+ SpecialQualifier("gl_SIMDGroupSizeAMD", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+
symbolTable.setFunctionExtensions("cubeFaceIndexAMD", 1, &E_GL_AMD_gcn_shader);
symbolTable.setFunctionExtensions("cubeFaceCoordAMD", 1, &E_GL_AMD_gcn_shader);
symbolTable.setFunctionExtensions("timeAMD", 1, &E_GL_AMD_gcn_shader);
@@ -7949,15 +7162,21 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("fragmentMaskFetchAMD", 1, &E_GL_AMD_shader_fragment_mask);
symbolTable.setFunctionExtensions("fragmentFetchAMD", 1, &E_GL_AMD_shader_fragment_mask);
}
-#endif
-#ifdef NV_EXTENSIONS
+ symbolTable.setFunctionExtensions("countLeadingZeros", 1, &E_GL_INTEL_shader_integer_functions2);
+ symbolTable.setFunctionExtensions("countTrailingZeros", 1, &E_GL_INTEL_shader_integer_functions2);
+ symbolTable.setFunctionExtensions("absoluteDifference", 1, &E_GL_INTEL_shader_integer_functions2);
+ symbolTable.setFunctionExtensions("addSaturate", 1, &E_GL_INTEL_shader_integer_functions2);
+ symbolTable.setFunctionExtensions("subtractSaturate", 1, &E_GL_INTEL_shader_integer_functions2);
+ symbolTable.setFunctionExtensions("average", 1, &E_GL_INTEL_shader_integer_functions2);
+ symbolTable.setFunctionExtensions("averageRounded", 1, &E_GL_INTEL_shader_integer_functions2);
+ symbolTable.setFunctionExtensions("multiply32x16", 1, &E_GL_INTEL_shader_integer_functions2);
+
symbolTable.setFunctionExtensions("textureFootprintNV", 1, &E_GL_NV_shader_texture_footprint);
symbolTable.setFunctionExtensions("textureFootprintClampNV", 1, &E_GL_NV_shader_texture_footprint);
symbolTable.setFunctionExtensions("textureFootprintLodNV", 1, &E_GL_NV_shader_texture_footprint);
symbolTable.setFunctionExtensions("textureFootprintGradNV", 1, &E_GL_NV_shader_texture_footprint);
symbolTable.setFunctionExtensions("textureFootprintGradClampNV", 1, &E_GL_NV_shader_texture_footprint);
-#endif
// Compatibility variables, vertex only
if (spvVersion.spv == 0) {
BuiltInVariable("gl_Color", EbvColor, symbolTable);
@@ -7998,16 +7217,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("imageAtomicCompSwap", 1, &E_GL_OES_shader_image_atomic);
}
- if (spvVersion.vulkan == 0) {
- SpecialQualifier("gl_VertexID", EvqVertexId, EbvVertexId, symbolTable);
- SpecialQualifier("gl_InstanceID", EvqInstanceId, EbvInstanceId, symbolTable);
- }
-
- if (spvVersion.vulkan > 0) {
- BuiltInVariable("gl_VertexIndex", EbvVertexIndex, symbolTable);
- BuiltInVariable("gl_InstanceIndex", EbvInstanceIndex, symbolTable);
- }
-
if (version >= 300 /* both ES and non-ES */) {
symbolTable.setVariableExtensions("gl_ViewID_OVR", Num_OVR_multiview_EXTs, OVR_multiview_EXTs);
BuiltInVariable("gl_ViewID_OVR", EbvViewIndex, symbolTable);
@@ -8017,7 +7226,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("shadow2DEXT", 1, &E_GL_EXT_shadow_samplers);
symbolTable.setFunctionExtensions("shadow2DProjEXT", 1, &E_GL_EXT_shadow_samplers);
}
-
// Fall through
case EShLangTessControl:
@@ -8033,22 +7241,26 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_BoundingBox", EbvBoundingBox, symbolTable);
}
}
-
// Fall through
case EShLangTessEvaluation:
case EShLangGeometry:
+#endif
SpecialQualifier("gl_Position", EvqPosition, EbvPosition, symbolTable);
SpecialQualifier("gl_PointSize", EvqPointSize, EbvPointSize, symbolTable);
- SpecialQualifier("gl_ClipVertex", EvqClipVertex, EbvClipVertex, symbolTable);
BuiltInVariable("gl_in", "gl_Position", EbvPosition, symbolTable);
BuiltInVariable("gl_in", "gl_PointSize", EbvPointSize, symbolTable);
- BuiltInVariable("gl_in", "gl_ClipDistance", EbvClipDistance, symbolTable);
- BuiltInVariable("gl_in", "gl_CullDistance", EbvCullDistance, symbolTable);
BuiltInVariable("gl_out", "gl_Position", EbvPosition, symbolTable);
BuiltInVariable("gl_out", "gl_PointSize", EbvPointSize, symbolTable);
+
+#ifndef GLSLANG_WEB
+ SpecialQualifier("gl_ClipVertex", EvqClipVertex, EbvClipVertex, symbolTable);
+
+ BuiltInVariable("gl_in", "gl_ClipDistance", EbvClipDistance, symbolTable);
+ BuiltInVariable("gl_in", "gl_CullDistance", EbvCullDistance, symbolTable);
+
BuiltInVariable("gl_out", "gl_ClipDistance", EbvClipDistance, symbolTable);
BuiltInVariable("gl_out", "gl_CullDistance", EbvCullDistance, symbolTable);
@@ -8060,19 +7272,10 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_Layer", EbvLayer, symbolTable);
BuiltInVariable("gl_ViewportIndex", EbvViewportIndex, symbolTable);
-#ifdef NV_EXTENSIONS
if (language != EShLangGeometry) {
symbolTable.setVariableExtensions("gl_Layer", Num_viewportEXTs, viewportEXTs);
symbolTable.setVariableExtensions("gl_ViewportIndex", Num_viewportEXTs, viewportEXTs);
}
-#else
- if (language != EShLangGeometry && version >= 410) {
- symbolTable.setVariableExtensions("gl_Layer", 1, &E_GL_ARB_shader_viewport_layer_array);
- symbolTable.setVariableExtensions("gl_ViewportIndex", 1, &E_GL_ARB_shader_viewport_layer_array);
- }
-#endif
-
-#ifdef NV_EXTENSIONS
symbolTable.setVariableExtensions("gl_ViewportMask", 1, &E_GL_NV_viewport_array2);
symbolTable.setVariableExtensions("gl_SecondaryPositionNV", 1, &E_GL_NV_stereo_view_rendering);
symbolTable.setVariableExtensions("gl_SecondaryViewportMaskNV", 1, &E_GL_NV_stereo_view_rendering);
@@ -8085,7 +7288,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_PositionPerViewNV", EbvPositionPerViewNV, symbolTable);
BuiltInVariable("gl_ViewportMaskPerViewNV", EbvViewportMaskPerViewNV, symbolTable);
- if (language != EShLangVertex) {
+ if (language == EShLangVertex || language == EShLangGeometry) {
symbolTable.setVariableExtensions("gl_in", "gl_SecondaryPositionNV", 1, &E_GL_NV_stereo_view_rendering);
symbolTable.setVariableExtensions("gl_in", "gl_PositionPerViewNV", 1, &E_GL_NVX_multiview_per_view_attributes);
@@ -8103,7 +7306,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_out", "gl_SecondaryViewportMaskNV", EbvSecondaryViewportMaskNV, symbolTable);
BuiltInVariable("gl_out", "gl_PositionPerViewNV", EbvPositionPerViewNV, symbolTable);
BuiltInVariable("gl_out", "gl_ViewportMaskPerViewNV", EbvViewportMaskPerViewNV, symbolTable);
-#endif
BuiltInVariable("gl_PatchVerticesIn", EbvPatchVertices, symbolTable);
BuiltInVariable("gl_TessLevelOuter", EbvTessLevelOuter, symbolTable);
@@ -8159,7 +7361,22 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_ViewIndex", 1, &E_GL_EXT_multiview);
BuiltInVariable("gl_ViewIndex", EbvViewIndex, symbolTable);
}
-
+
+ if (profile != EEsProfile) {
+ BuiltInVariable("gl_SubGroupInvocationARB", EbvSubGroupInvocation, symbolTable);
+ BuiltInVariable("gl_SubGroupEqMaskARB", EbvSubGroupEqMask, symbolTable);
+ BuiltInVariable("gl_SubGroupGeMaskARB", EbvSubGroupGeMask, symbolTable);
+ BuiltInVariable("gl_SubGroupGtMaskARB", EbvSubGroupGtMask, symbolTable);
+ BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
+ BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
+
+ if (spvVersion.vulkan > 0)
+ // Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
+ SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+ else
+ BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
+ }
+
// GL_KHR_shader_subgroup
if ((profile == EEsProfile && version >= 310) ||
(profile != EEsProfile && version >= 140)) {
@@ -8178,8 +7395,18 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubgroupGtMask", EbvSubgroupGtMask2, symbolTable);
BuiltInVariable("gl_SubgroupLeMask", EbvSubgroupLeMask2, symbolTable);
BuiltInVariable("gl_SubgroupLtMask", EbvSubgroupLtMask2, symbolTable);
- }
+ // GL_NV_shader_sm_builtins
+ symbolTable.setVariableExtensions("gl_WarpsPerSMNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMCountNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_WarpIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ BuiltInVariable("gl_WarpsPerSMNV", EbvWarpsPerSM, symbolTable);
+ BuiltInVariable("gl_SMCountNV", EbvSMCount, symbolTable);
+ BuiltInVariable("gl_WarpIDNV", EbvWarpID, symbolTable);
+ BuiltInVariable("gl_SMIDNV", EbvSMID, symbolTable);
+ }
+#endif
break;
case EShLangFragment:
@@ -8196,6 +7423,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
}
}
SpecialQualifier("gl_FragDepth", EvqFragDepth, EbvFragDepth, symbolTable);
+#ifndef GLSLANG_WEB
SpecialQualifier("gl_FragDepthEXT", EvqFragDepth, EbvFragDepth, symbolTable);
SpecialQualifier("gl_HelperInvocation", EvqVaryingIn, EbvHelperInvocation, symbolTable);
@@ -8208,18 +7436,29 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_FragStencilRefARB", EbvFragStencilRef, symbolTable);
}
- if ((profile != EEsProfile && version >= 400) ||
+ if ((profile != EEsProfile && version >= 130) ||
(profile == EEsProfile && version >= 310)) {
- BuiltInVariable("gl_SampleID", EbvSampleId, symbolTable);
- BuiltInVariable("gl_SamplePosition", EbvSamplePosition, symbolTable);
- BuiltInVariable("gl_SampleMaskIn", EbvSampleMask, symbolTable);
- BuiltInVariable("gl_SampleMask", EbvSampleMask, symbolTable);
- if (profile == EEsProfile && version < 320) {
- symbolTable.setVariableExtensions("gl_SampleID", 1, &E_GL_OES_sample_variables);
- symbolTable.setVariableExtensions("gl_SamplePosition", 1, &E_GL_OES_sample_variables);
- symbolTable.setVariableExtensions("gl_SampleMaskIn", 1, &E_GL_OES_sample_variables);
- symbolTable.setVariableExtensions("gl_SampleMask", 1, &E_GL_OES_sample_variables);
- symbolTable.setVariableExtensions("gl_NumSamples", 1, &E_GL_OES_sample_variables);
+ BuiltInVariable("gl_SampleID", EbvSampleId, symbolTable);
+ BuiltInVariable("gl_SamplePosition", EbvSamplePosition, symbolTable);
+ BuiltInVariable("gl_SampleMask", EbvSampleMask, symbolTable);
+
+ if (profile != EEsProfile && version < 400) {
+ BuiltInVariable("gl_NumSamples", EbvSampleMask, symbolTable);
+
+ symbolTable.setVariableExtensions("gl_SampleMask", 1, &E_GL_ARB_sample_shading);
+ symbolTable.setVariableExtensions("gl_SampleID", 1, &E_GL_ARB_sample_shading);
+ symbolTable.setVariableExtensions("gl_SamplePosition", 1, &E_GL_ARB_sample_shading);
+ symbolTable.setVariableExtensions("gl_NumSamples", 1, &E_GL_ARB_sample_shading);
+ } else {
+ BuiltInVariable("gl_SampleMaskIn", EbvSampleMask, symbolTable);
+
+ if (profile == EEsProfile && version < 320) {
+ symbolTable.setVariableExtensions("gl_SampleID", 1, &E_GL_OES_sample_variables);
+ symbolTable.setVariableExtensions("gl_SamplePosition", 1, &E_GL_OES_sample_variables);
+ symbolTable.setVariableExtensions("gl_SampleMaskIn", 1, &E_GL_OES_sample_variables);
+ symbolTable.setVariableExtensions("gl_SampleMask", 1, &E_GL_OES_sample_variables);
+ symbolTable.setVariableExtensions("gl_NumSamples", 1, &E_GL_OES_sample_variables);
+ }
}
}
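// Net effect of the re-gated sample variables above, read from the
// branches ("desktop" meaning profile != EEsProfile):
//
//   desktop 130..390 : gl_SampleID / gl_SamplePosition / gl_SampleMask /
//                      gl_NumSamples, all behind GL_ARB_sample_shading
//   desktop >= 400   : the same set in core, plus gl_SampleMaskIn
//   ES 310..319      : the set behind GL_OES_sample_variables
//   ES >= 320        : in core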
@@ -8350,7 +7589,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("textureGradOffsetClampARB", 1, &E_GL_ARB_sparse_texture_clamp);
}
-#ifdef AMD_EXTENSIONS
// E_GL_AMD_shader_explicit_vertex_parameter
if (profile != EEsProfile) {
symbolTable.setVariableExtensions("gl_BaryCoordNoPerspAMD", 1, &E_GL_AMD_shader_explicit_vertex_parameter);
@@ -8388,9 +7626,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("imageStoreLodAMD", 1, &E_GL_AMD_shader_image_load_store_lod);
symbolTable.setFunctionExtensions("sparseImageLoadLodAMD", 1, &E_GL_AMD_shader_image_load_store_lod);
}
-#endif
-
-#ifdef NV_EXTENSIONS
if (profile != EEsProfile && version >= 430) {
symbolTable.setVariableExtensions("gl_FragFullyCoveredNV", 1, &E_GL_NV_conservative_raster_underestimation);
BuiltInVariable("gl_FragFullyCoveredNV", EbvFragFullyCoveredNV, symbolTable);
@@ -8406,20 +7641,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_BaryCoordNV", EbvBaryCoordNV, symbolTable);
BuiltInVariable("gl_BaryCoordNoPerspNV", EbvBaryCoordNoPerspNV, symbolTable);
}
- if (((profile != EEsProfile && version >= 450) ||
- (profile == EEsProfile && version >= 320)) &&
- language == EShLangCompute) {
- symbolTable.setFunctionExtensions("dFdx", 1, &E_GL_NV_compute_shader_derivatives);
- symbolTable.setFunctionExtensions("dFdy", 1, &E_GL_NV_compute_shader_derivatives);
- symbolTable.setFunctionExtensions("fwidth", 1, &E_GL_NV_compute_shader_derivatives);
- symbolTable.setFunctionExtensions("dFdxFine", 1, &E_GL_NV_compute_shader_derivatives);
- symbolTable.setFunctionExtensions("dFdyFine", 1, &E_GL_NV_compute_shader_derivatives);
- symbolTable.setFunctionExtensions("fwidthFine", 1, &E_GL_NV_compute_shader_derivatives);
- symbolTable.setFunctionExtensions("dFdxCoarse", 1, &E_GL_NV_compute_shader_derivatives);
- symbolTable.setFunctionExtensions("dFdyCoarse", 1, &E_GL_NV_compute_shader_derivatives);
- symbolTable.setFunctionExtensions("fwidthCoarse", 1, &E_GL_NV_compute_shader_derivatives);
- }
-#endif
if ((profile != EEsProfile && version >= 450) ||
(profile == EEsProfile && version >= 310)) {
@@ -8431,6 +7652,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_FragDepthEXT", 1, &E_GL_EXT_frag_depth);
+ symbolTable.setFunctionExtensions("clockARB", 1, &E_GL_ARB_shader_clock);
+ symbolTable.setFunctionExtensions("clock2x32ARB", 1, &E_GL_ARB_shader_clock);
+
+ symbolTable.setFunctionExtensions("clockRealtimeEXT", 1, &E_GL_EXT_shader_realtime_clock);
+ symbolTable.setFunctionExtensions("clockRealtime2x32EXT", 1, &E_GL_EXT_shader_realtime_clock);
+
if (profile == EEsProfile && version < 320) {
symbolTable.setVariableExtensions("gl_PrimitiveID", Num_AEP_geometry_shader, AEP_geometry_shader);
symbolTable.setVariableExtensions("gl_Layer", Num_AEP_geometry_shader, AEP_geometry_shader);
@@ -8553,8 +7780,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("subgroupQuadSwapHorizontal", 1, &E_GL_KHR_shader_subgroup_quad);
symbolTable.setFunctionExtensions("subgroupQuadSwapVertical", 1, &E_GL_KHR_shader_subgroup_quad);
symbolTable.setFunctionExtensions("subgroupQuadSwapDiagonal", 1, &E_GL_KHR_shader_subgroup_quad);
-
-#ifdef NV_EXTENSIONS
symbolTable.setFunctionExtensions("subgroupPartitionNV", 1, &E_GL_NV_shader_subgroup_partitioned);
symbolTable.setFunctionExtensions("subgroupPartitionedAddNV", 1, &E_GL_NV_shader_subgroup_partitioned);
symbolTable.setFunctionExtensions("subgroupPartitionedMulNV", 1, &E_GL_NV_shader_subgroup_partitioned);
@@ -8577,8 +7802,16 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("subgroupPartitionedExclusiveAndNV", 1, &E_GL_NV_shader_subgroup_partitioned);
symbolTable.setFunctionExtensions("subgroupPartitionedExclusiveOrNV", 1, &E_GL_NV_shader_subgroup_partitioned);
symbolTable.setFunctionExtensions("subgroupPartitionedExclusiveXorNV", 1, &E_GL_NV_shader_subgroup_partitioned);
-#endif
+ // GL_NV_shader_sm_builtins
+ symbolTable.setVariableExtensions("gl_WarpsPerSMNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMCountNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_WarpIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ BuiltInVariable("gl_WarpsPerSMNV", EbvWarpsPerSM, symbolTable);
+ BuiltInVariable("gl_SMCountNV", EbvSMCount, symbolTable);
+ BuiltInVariable("gl_WarpIDNV", EbvWarpID, symbolTable);
+ BuiltInVariable("gl_SMIDNV", EbvSMID, symbolTable);
}
if (profile == EEsProfile) {
@@ -8598,6 +7831,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_SemanticsAcquireRelease", 1, &E_GL_KHR_memory_scope_semantics);
symbolTable.setVariableExtensions("gl_SemanticsMakeAvailable", 1, &E_GL_KHR_memory_scope_semantics);
symbolTable.setVariableExtensions("gl_SemanticsMakeVisible", 1, &E_GL_KHR_memory_scope_semantics);
+ symbolTable.setVariableExtensions("gl_SemanticsVolatile", 1, &E_GL_KHR_memory_scope_semantics);
symbolTable.setVariableExtensions("gl_StorageSemanticsNone", 1, &E_GL_KHR_memory_scope_semantics);
symbolTable.setVariableExtensions("gl_StorageSemanticsBuffer", 1, &E_GL_KHR_memory_scope_semantics);
@@ -8605,6 +7839,9 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setVariableExtensions("gl_StorageSemanticsImage", 1, &E_GL_KHR_memory_scope_semantics);
symbolTable.setVariableExtensions("gl_StorageSemanticsOutput", 1, &E_GL_KHR_memory_scope_semantics);
}
+
+ symbolTable.setFunctionExtensions("helperInvocationEXT", 1, &E_GL_EXT_demote_to_helper_invocation);
+#endif
break;
case EShLangCompute:
@@ -8614,6 +7851,15 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_LocalInvocationID", EbvLocalInvocationId, symbolTable);
BuiltInVariable("gl_GlobalInvocationID", EbvGlobalInvocationId, symbolTable);
BuiltInVariable("gl_LocalInvocationIndex", EbvLocalInvocationIndex, symbolTable);
+ BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable);
+ BuiltInVariable("gl_ViewIndex", EbvViewIndex, symbolTable);
+
+#ifndef GLSLANG_WEB
+ if ((profile != EEsProfile && version >= 140) ||
+ (profile == EEsProfile && version >= 310)) {
+ symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group);
+ symbolTable.setVariableExtensions("gl_ViewIndex", 1, &E_GL_EXT_multiview);
+ }
if (profile != EEsProfile && version < 430) {
symbolTable.setVariableExtensions("gl_NumWorkGroups", 1, &E_GL_ARB_compute_shader);
@@ -8683,14 +7929,16 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubgroupGtMask", EbvSubgroupGtMask2, symbolTable);
BuiltInVariable("gl_SubgroupLeMask", EbvSubgroupLeMask2, symbolTable);
BuiltInVariable("gl_SubgroupLtMask", EbvSubgroupLtMask2, symbolTable);
- }
- if ((profile != EEsProfile && version >= 140) ||
- (profile == EEsProfile && version >= 310)) {
- symbolTable.setVariableExtensions("gl_DeviceIndex", 1, &E_GL_EXT_device_group);
- BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable);
- symbolTable.setVariableExtensions("gl_ViewIndex", 1, &E_GL_EXT_multiview);
- BuiltInVariable("gl_ViewIndex", EbvViewIndex, symbolTable);
+ // GL_NV_shader_sm_builtins
+ symbolTable.setVariableExtensions("gl_WarpsPerSMNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMCountNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_WarpIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ BuiltInVariable("gl_WarpsPerSMNV", EbvWarpsPerSM, symbolTable);
+ BuiltInVariable("gl_SMCountNV", EbvSMCount, symbolTable);
+ BuiltInVariable("gl_WarpIDNV", EbvWarpID, symbolTable);
+ BuiltInVariable("gl_SMIDNV", EbvSMID, symbolTable);
}
// GL_KHR_shader_subgroup
@@ -8705,12 +7953,28 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.setFunctionExtensions("subgroupMemoryBarrierShared", 1, &E_GL_KHR_shader_subgroup_basic);
}
- symbolTable.setFunctionExtensions("coopMatLoadNV", 1, &E_GL_NV_cooperative_matrix);
- symbolTable.setFunctionExtensions("coopMatStoreNV", 1, &E_GL_NV_cooperative_matrix);
- symbolTable.setFunctionExtensions("coopMatMulAddNV", 1, &E_GL_NV_cooperative_matrix);
+ {
+ const char *coopExt[2] = { E_GL_NV_cooperative_matrix, E_GL_NV_integer_cooperative_matrix };
+ symbolTable.setFunctionExtensions("coopMatLoadNV", 2, coopExt);
+ symbolTable.setFunctionExtensions("coopMatStoreNV", 2, coopExt);
+ symbolTable.setFunctionExtensions("coopMatMulAddNV", 2, coopExt);
+ }
+ if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
+ symbolTable.setFunctionExtensions("dFdx", 1, &E_GL_NV_compute_shader_derivatives);
+ symbolTable.setFunctionExtensions("dFdy", 1, &E_GL_NV_compute_shader_derivatives);
+ symbolTable.setFunctionExtensions("fwidth", 1, &E_GL_NV_compute_shader_derivatives);
+ symbolTable.setFunctionExtensions("dFdxFine", 1, &E_GL_NV_compute_shader_derivatives);
+ symbolTable.setFunctionExtensions("dFdyFine", 1, &E_GL_NV_compute_shader_derivatives);
+ symbolTable.setFunctionExtensions("fwidthFine", 1, &E_GL_NV_compute_shader_derivatives);
+ symbolTable.setFunctionExtensions("dFdxCoarse", 1, &E_GL_NV_compute_shader_derivatives);
+ symbolTable.setFunctionExtensions("dFdyCoarse", 1, &E_GL_NV_compute_shader_derivatives);
+ symbolTable.setFunctionExtensions("fwidthCoarse", 1, &E_GL_NV_compute_shader_derivatives);
+ }
+#endif
break;
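// Two details in the compute block above: passing an array to
// setFunctionExtensions() lets any one of the listed extensions enable
// the function (the any-of semantics is assumed from glslang's
// multi-extension handling, not spelled out in this hunk), so
//
//   const char *coopExt[2] = { E_GL_NV_cooperative_matrix,
//                              E_GL_NV_integer_cooperative_matrix };
//   symbolTable.setFunctionExtensions("coopMatLoadNV", 2, coopExt);
//
// makes the coopMat* builtins reachable from either extension. The
// dFdx/fwidth family also moves into this compute case, where the old
// `language == EShLangCompute` test becomes redundant.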
-#ifdef NV_EXTENSIONS
+
+#ifndef GLSLANG_WEB
case EShLangRayGenNV:
case EShLangIntersectNV:
case EShLangAnyHitNV:
@@ -8754,8 +8018,62 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_WorldToObjectNV", EbvWorldToObjectNV, symbolTable);
BuiltInVariable("gl_IncomingRayFlagsNV", EbvIncomingRayFlagsNV, symbolTable);
BuiltInVariable("gl_DeviceIndex", EbvDeviceIndex, symbolTable);
- }
+
+ // GL_ARB_shader_ballot
+ symbolTable.setVariableExtensions("gl_SubGroupSizeARB", 1, &E_GL_ARB_shader_ballot);
+ symbolTable.setVariableExtensions("gl_SubGroupInvocationARB", 1, &E_GL_ARB_shader_ballot);
+ symbolTable.setVariableExtensions("gl_SubGroupEqMaskARB", 1, &E_GL_ARB_shader_ballot);
+ symbolTable.setVariableExtensions("gl_SubGroupGeMaskARB", 1, &E_GL_ARB_shader_ballot);
+ symbolTable.setVariableExtensions("gl_SubGroupGtMaskARB", 1, &E_GL_ARB_shader_ballot);
+ symbolTable.setVariableExtensions("gl_SubGroupLeMaskARB", 1, &E_GL_ARB_shader_ballot);
+ symbolTable.setVariableExtensions("gl_SubGroupLtMaskARB", 1, &E_GL_ARB_shader_ballot);
+
+ BuiltInVariable("gl_SubGroupInvocationARB", EbvSubGroupInvocation, symbolTable);
+ BuiltInVariable("gl_SubGroupEqMaskARB", EbvSubGroupEqMask, symbolTable);
+ BuiltInVariable("gl_SubGroupGeMaskARB", EbvSubGroupGeMask, symbolTable);
+ BuiltInVariable("gl_SubGroupGtMaskARB", EbvSubGroupGtMask, symbolTable);
+ BuiltInVariable("gl_SubGroupLeMaskARB", EbvSubGroupLeMask, symbolTable);
+ BuiltInVariable("gl_SubGroupLtMaskARB", EbvSubGroupLtMask, symbolTable);
+
+ if (spvVersion.vulkan > 0)
+ // Treat "gl_SubGroupSizeARB" as shader input instead of uniform for Vulkan
+ SpecialQualifier("gl_SubGroupSizeARB", EvqVaryingIn, EbvSubGroupSize, symbolTable);
+ else
+ BuiltInVariable("gl_SubGroupSizeARB", EbvSubGroupSize, symbolTable);
+
+ // GL_KHR_shader_subgroup
+ symbolTable.setVariableExtensions("gl_NumSubgroups", 1, &E_GL_KHR_shader_subgroup_basic);
+ symbolTable.setVariableExtensions("gl_SubgroupID", 1, &E_GL_KHR_shader_subgroup_basic);
+ symbolTable.setVariableExtensions("gl_SubgroupSize", 1, &E_GL_KHR_shader_subgroup_basic);
+ symbolTable.setVariableExtensions("gl_SubgroupInvocationID", 1, &E_GL_KHR_shader_subgroup_basic);
+ symbolTable.setVariableExtensions("gl_SubgroupEqMask", 1, &E_GL_KHR_shader_subgroup_ballot);
+ symbolTable.setVariableExtensions("gl_SubgroupGeMask", 1, &E_GL_KHR_shader_subgroup_ballot);
+ symbolTable.setVariableExtensions("gl_SubgroupGtMask", 1, &E_GL_KHR_shader_subgroup_ballot);
+ symbolTable.setVariableExtensions("gl_SubgroupLeMask", 1, &E_GL_KHR_shader_subgroup_ballot);
+ symbolTable.setVariableExtensions("gl_SubgroupLtMask", 1, &E_GL_KHR_shader_subgroup_ballot);
+
+ BuiltInVariable("gl_NumSubgroups", EbvNumSubgroups, symbolTable);
+ BuiltInVariable("gl_SubgroupID", EbvSubgroupID, symbolTable);
+ BuiltInVariable("gl_SubgroupSize", EbvSubgroupSize2, symbolTable);
+ BuiltInVariable("gl_SubgroupInvocationID", EbvSubgroupInvocation2, symbolTable);
+ BuiltInVariable("gl_SubgroupEqMask", EbvSubgroupEqMask2, symbolTable);
+ BuiltInVariable("gl_SubgroupGeMask", EbvSubgroupGeMask2, symbolTable);
+ BuiltInVariable("gl_SubgroupGtMask", EbvSubgroupGtMask2, symbolTable);
+ BuiltInVariable("gl_SubgroupLeMask", EbvSubgroupLeMask2, symbolTable);
+ BuiltInVariable("gl_SubgroupLtMask", EbvSubgroupLtMask2, symbolTable);
+
+ // GL_NV_shader_sm_builtins
+ symbolTable.setVariableExtensions("gl_WarpsPerSMNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMCountNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_WarpIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ BuiltInVariable("gl_WarpsPerSMNV", EbvWarpsPerSM, symbolTable);
+ BuiltInVariable("gl_SMCountNV", EbvSMCount, symbolTable);
+ BuiltInVariable("gl_WarpIDNV", EbvWarpID, symbolTable);
+ BuiltInVariable("gl_SMIDNV", EbvSMID, symbolTable);
+ }
break;
+
case EShLangMeshNV:
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
// per-vertex builtins
@@ -8887,6 +8205,16 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubgroupLtMask", EbvSubgroupLtMask2, symbolTable);
symbolTable.setFunctionExtensions("subgroupMemoryBarrierShared", 1, &E_GL_KHR_shader_subgroup_basic);
+
+ // GL_NV_shader_sm_builtins
+ symbolTable.setVariableExtensions("gl_WarpsPerSMNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMCountNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_WarpIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ BuiltInVariable("gl_WarpsPerSMNV", EbvWarpsPerSM, symbolTable);
+ BuiltInVariable("gl_SMCountNV", EbvSMCount, symbolTable);
+ BuiltInVariable("gl_WarpIDNV", EbvWarpID, symbolTable);
+ BuiltInVariable("gl_SMIDNV", EbvSMID, symbolTable);
}
break;
@@ -8977,6 +8305,16 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_SubgroupLtMask", EbvSubgroupLtMask2, symbolTable);
symbolTable.setFunctionExtensions("subgroupMemoryBarrierShared", 1, &E_GL_KHR_shader_subgroup_basic);
+
+ // GL_NV_shader_sm_builtins
+ symbolTable.setVariableExtensions("gl_WarpsPerSMNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMCountNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_WarpIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ symbolTable.setVariableExtensions("gl_SMIDNV", 1, &E_GL_NV_shader_sm_builtins);
+ BuiltInVariable("gl_WarpsPerSMNV", EbvWarpsPerSM, symbolTable);
+ BuiltInVariable("gl_SMCountNV", EbvSMCount, symbolTable);
+ BuiltInVariable("gl_WarpIDNV", EbvWarpID, symbolTable);
+ BuiltInVariable("gl_SMIDNV", EbvSMID, symbolTable);
}
break;
#endif
@@ -8992,74 +8330,10 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
// expected to be resolved through a library of functions, versus as
// operations.
//
- symbolTable.relateToOperator("not", EOpVectorLogicalNot);
- symbolTable.relateToOperator("matrixCompMult", EOpMul);
- // 120 and 150 are correct for both ES and desktop
- if (version >= 120) {
- symbolTable.relateToOperator("outerProduct", EOpOuterProduct);
- symbolTable.relateToOperator("transpose", EOpTranspose);
- if (version >= 150) {
- symbolTable.relateToOperator("determinant", EOpDeterminant);
- symbolTable.relateToOperator("inverse", EOpMatrixInverse);
- }
- }
+ relateTabledBuiltins(version, profile, spvVersion, language, symbolTable);
- symbolTable.relateToOperator("mod", EOpMod);
- symbolTable.relateToOperator("modf", EOpModf);
-
- symbolTable.relateToOperator("equal", EOpVectorEqual);
- symbolTable.relateToOperator("notEqual", EOpVectorNotEqual);
- symbolTable.relateToOperator("lessThan", EOpLessThan);
- symbolTable.relateToOperator("greaterThan", EOpGreaterThan);
- symbolTable.relateToOperator("lessThanEqual", EOpLessThanEqual);
- symbolTable.relateToOperator("greaterThanEqual", EOpGreaterThanEqual);
-
- symbolTable.relateToOperator("radians", EOpRadians);
- symbolTable.relateToOperator("degrees", EOpDegrees);
- symbolTable.relateToOperator("sin", EOpSin);
- symbolTable.relateToOperator("cos", EOpCos);
- symbolTable.relateToOperator("tan", EOpTan);
- symbolTable.relateToOperator("asin", EOpAsin);
- symbolTable.relateToOperator("acos", EOpAcos);
- symbolTable.relateToOperator("atan", EOpAtan);
- symbolTable.relateToOperator("sinh", EOpSinh);
- symbolTable.relateToOperator("cosh", EOpCosh);
- symbolTable.relateToOperator("tanh", EOpTanh);
- symbolTable.relateToOperator("asinh", EOpAsinh);
- symbolTable.relateToOperator("acosh", EOpAcosh);
- symbolTable.relateToOperator("atanh", EOpAtanh);
-
- symbolTable.relateToOperator("pow", EOpPow);
- symbolTable.relateToOperator("exp2", EOpExp2);
- symbolTable.relateToOperator("log", EOpLog);
- symbolTable.relateToOperator("exp", EOpExp);
- symbolTable.relateToOperator("log2", EOpLog2);
- symbolTable.relateToOperator("sqrt", EOpSqrt);
- symbolTable.relateToOperator("inversesqrt", EOpInverseSqrt);
-
- symbolTable.relateToOperator("abs", EOpAbs);
- symbolTable.relateToOperator("sign", EOpSign);
- symbolTable.relateToOperator("floor", EOpFloor);
- symbolTable.relateToOperator("trunc", EOpTrunc);
- symbolTable.relateToOperator("round", EOpRound);
- symbolTable.relateToOperator("roundEven", EOpRoundEven);
- symbolTable.relateToOperator("ceil", EOpCeil);
- symbolTable.relateToOperator("fract", EOpFract);
- symbolTable.relateToOperator("min", EOpMin);
- symbolTable.relateToOperator("max", EOpMax);
- symbolTable.relateToOperator("clamp", EOpClamp);
- symbolTable.relateToOperator("mix", EOpMix);
- symbolTable.relateToOperator("step", EOpStep);
- symbolTable.relateToOperator("smoothstep", EOpSmoothStep);
-
- symbolTable.relateToOperator("isnan", EOpIsNan);
- symbolTable.relateToOperator("isinf", EOpIsInf);
-
- symbolTable.relateToOperator("floatBitsToInt", EOpFloatBitsToInt);
- symbolTable.relateToOperator("floatBitsToUint", EOpFloatBitsToUint);
- symbolTable.relateToOperator("intBitsToFloat", EOpIntBitsToFloat);
- symbolTable.relateToOperator("uintBitsToFloat", EOpUintBitsToFloat);
+#ifndef GLSLANG_WEB
symbolTable.relateToOperator("doubleBitsToInt64", EOpDoubleBitsToInt64);
symbolTable.relateToOperator("doubleBitsToUint64", EOpDoubleBitsToUint64);
symbolTable.relateToOperator("int64BitsToDouble", EOpInt64BitsToDouble);
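// The relateTabledBuiltins() call above stands in for the long runs of
// one-line relateToOperator() calls deleted in this hunk. A minimal
// sketch of that table-driven pattern (kTable / minVersion are
// illustrative names, not glslang's):
//
//   struct TabledBuiltin { const char* name; TOperator op; int minVersion; };
//   static const TabledBuiltin kTable[] = {
//       { "radians",   EOpRadians,       100 },
//       { "transpose", EOpTranspose,     120 },
//       { "inverse",   EOpMatrixInverse, 150 },
//   };
//   for (const TabledBuiltin& f : kTable)
//       if (version >= f.minVersion)
//           symbolTable.relateToOperator(f.name, f.op);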
@@ -9074,11 +8348,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("int16BitsToHalf", EOpInt16BitsToFloat16);
symbolTable.relateToOperator("uint16BitsToHalf", EOpUint16BitsToFloat16);
- symbolTable.relateToOperator("packSnorm2x16", EOpPackSnorm2x16);
- symbolTable.relateToOperator("unpackSnorm2x16", EOpUnpackSnorm2x16);
- symbolTable.relateToOperator("packUnorm2x16", EOpPackUnorm2x16);
- symbolTable.relateToOperator("unpackUnorm2x16", EOpUnpackUnorm2x16);
-
symbolTable.relateToOperator("packSnorm4x8", EOpPackSnorm4x8);
symbolTable.relateToOperator("unpackSnorm4x8", EOpUnpackSnorm4x8);
symbolTable.relateToOperator("packUnorm4x8", EOpPackUnorm4x8);
@@ -9087,9 +8356,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("packDouble2x32", EOpPackDouble2x32);
symbolTable.relateToOperator("unpackDouble2x32", EOpUnpackDouble2x32);
- symbolTable.relateToOperator("packHalf2x16", EOpPackHalf2x16);
- symbolTable.relateToOperator("unpackHalf2x16", EOpUnpackHalf2x16);
-
symbolTable.relateToOperator("packInt2x32", EOpPackInt2x32);
symbolTable.relateToOperator("unpackInt2x32", EOpUnpackInt2x32);
symbolTable.relateToOperator("packUint2x32", EOpPackUint2x32);
@@ -9115,33 +8381,10 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("unpack16", EOpUnpack16);
symbolTable.relateToOperator("unpack8", EOpUnpack8);
- symbolTable.relateToOperator("length", EOpLength);
- symbolTable.relateToOperator("distance", EOpDistance);
- symbolTable.relateToOperator("dot", EOpDot);
- symbolTable.relateToOperator("cross", EOpCross);
- symbolTable.relateToOperator("normalize", EOpNormalize);
- symbolTable.relateToOperator("faceforward", EOpFaceForward);
- symbolTable.relateToOperator("reflect", EOpReflect);
- symbolTable.relateToOperator("refract", EOpRefract);
-
- symbolTable.relateToOperator("any", EOpAny);
- symbolTable.relateToOperator("all", EOpAll);
-
- symbolTable.relateToOperator("barrier", EOpBarrier);
symbolTable.relateToOperator("controlBarrier", EOpBarrier);
- symbolTable.relateToOperator("memoryBarrier", EOpMemoryBarrier);
symbolTable.relateToOperator("memoryBarrierAtomicCounter", EOpMemoryBarrierAtomicCounter);
- symbolTable.relateToOperator("memoryBarrierBuffer", EOpMemoryBarrierBuffer);
symbolTable.relateToOperator("memoryBarrierImage", EOpMemoryBarrierImage);
- symbolTable.relateToOperator("atomicAdd", EOpAtomicAdd);
- symbolTable.relateToOperator("atomicMin", EOpAtomicMin);
- symbolTable.relateToOperator("atomicMax", EOpAtomicMax);
- symbolTable.relateToOperator("atomicAnd", EOpAtomicAnd);
- symbolTable.relateToOperator("atomicOr", EOpAtomicOr);
- symbolTable.relateToOperator("atomicXor", EOpAtomicXor);
- symbolTable.relateToOperator("atomicExchange", EOpAtomicExchange);
- symbolTable.relateToOperator("atomicCompSwap", EOpAtomicCompSwap);
symbolTable.relateToOperator("atomicLoad", EOpAtomicLoad);
symbolTable.relateToOperator("atomicStore", EOpAtomicStore);
@@ -9149,6 +8392,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("atomicCounterDecrement", EOpAtomicCounterDecrement);
symbolTable.relateToOperator("atomicCounter", EOpAtomicCounter);
+ symbolTable.relateToOperator("clockARB", EOpReadClockSubgroupKHR);
+ symbolTable.relateToOperator("clock2x32ARB", EOpReadClockSubgroupKHR);
+
+ symbolTable.relateToOperator("clockRealtimeEXT", EOpReadClockDeviceKHR);
+ symbolTable.relateToOperator("clockRealtime2x32EXT", EOpReadClockDeviceKHR);
+
if (profile != EEsProfile && version >= 460) {
symbolTable.relateToOperator("atomicCounterAdd", EOpAtomicCounterAdd);
symbolTable.relateToOperator("atomicCounterSubtract", EOpAtomicCounterSubtract);
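// On the clock mappings above: both GLSL pairs lower to SPIR-V
// OpReadClockKHR (SPV_KHR_shader_clock); clockARB / clock2x32ARB read at
// Subgroup scope and clockRealtimeEXT / clockRealtime2x32EXT at Device
// scope, hence the two distinct operators. Illustrative GLSL, with return
// types as specified by the two extensions:
//
//   uint64_t t  = clockARB();             // subgroup-scope tick counter
//   uvec2    t2 = clockRealtime2x32EXT(); // device scope, split into 2x32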
@@ -9175,6 +8424,17 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("findLSB", EOpFindLSB);
symbolTable.relateToOperator("findMSB", EOpFindMSB);
+ symbolTable.relateToOperator("helperInvocationEXT", EOpIsHelperInvocation);
+
+ symbolTable.relateToOperator("countLeadingZeros", EOpCountLeadingZeros);
+ symbolTable.relateToOperator("countTrailingZeros", EOpCountTrailingZeros);
+ symbolTable.relateToOperator("absoluteDifference", EOpAbsDifference);
+ symbolTable.relateToOperator("addSaturate", EOpAddSaturate);
+ symbolTable.relateToOperator("subtractSaturate", EOpSubSaturate);
+ symbolTable.relateToOperator("average", EOpAverage);
+ symbolTable.relateToOperator("averageRounded", EOpAverageRounded);
+ symbolTable.relateToOperator("multiply32x16", EOpMul32x16);
+
if (PureOperatorBuiltins) {
symbolTable.relateToOperator("imageSize", EOpImageQuerySize);
symbolTable.relateToOperator("imageSamples", EOpImageQuerySamples);
@@ -9194,24 +8454,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("subpassLoad", EOpSubpassLoad);
symbolTable.relateToOperator("subpassLoadMS", EOpSubpassLoadMS);
- symbolTable.relateToOperator("textureSize", EOpTextureQuerySize);
- symbolTable.relateToOperator("textureQueryLod", EOpTextureQueryLod);
- symbolTable.relateToOperator("textureQueryLevels", EOpTextureQueryLevels);
- symbolTable.relateToOperator("textureSamples", EOpTextureQuerySamples);
- symbolTable.relateToOperator("texture", EOpTexture);
- symbolTable.relateToOperator("textureProj", EOpTextureProj);
- symbolTable.relateToOperator("textureLod", EOpTextureLod);
- symbolTable.relateToOperator("textureOffset", EOpTextureOffset);
- symbolTable.relateToOperator("texelFetch", EOpTextureFetch);
- symbolTable.relateToOperator("texelFetchOffset", EOpTextureFetchOffset);
- symbolTable.relateToOperator("textureProjOffset", EOpTextureProjOffset);
- symbolTable.relateToOperator("textureLodOffset", EOpTextureLodOffset);
- symbolTable.relateToOperator("textureProjLod", EOpTextureProjLod);
- symbolTable.relateToOperator("textureProjLodOffset", EOpTextureProjLodOffset);
- symbolTable.relateToOperator("textureGrad", EOpTextureGrad);
- symbolTable.relateToOperator("textureGradOffset", EOpTextureGradOffset);
- symbolTable.relateToOperator("textureProjGrad", EOpTextureProjGrad);
- symbolTable.relateToOperator("textureProjGradOffset", EOpTextureProjGradOffset);
symbolTable.relateToOperator("textureGather", EOpTextureGather);
symbolTable.relateToOperator("textureGatherOffset", EOpTextureGatherOffset);
symbolTable.relateToOperator("textureGatherOffsets", EOpTextureGatherOffsets);
@@ -9221,17 +8463,17 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("noise3", EOpNoise);
symbolTable.relateToOperator("noise4", EOpNoise);
-#ifdef NV_EXTENSIONS
symbolTable.relateToOperator("textureFootprintNV", EOpImageSampleFootprintNV);
symbolTable.relateToOperator("textureFootprintClampNV", EOpImageSampleFootprintClampNV);
symbolTable.relateToOperator("textureFootprintLodNV", EOpImageSampleFootprintLodNV);
symbolTable.relateToOperator("textureFootprintGradNV", EOpImageSampleFootprintGradNV);
symbolTable.relateToOperator("textureFootprintGradClampNV", EOpImageSampleFootprintGradClampNV);
-#endif
+
+ if (spvVersion.spv == 0 && IncludeLegacy(version, profile, spvVersion))
+ symbolTable.relateToOperator("ftransform", EOpFtransform);
if (spvVersion.spv == 0 && (IncludeLegacy(version, profile, spvVersion) ||
(profile == EEsProfile && version == 100))) {
- symbolTable.relateToOperator("ftransform", EOpFtransform);
symbolTable.relateToOperator("texture1D", EOpTexture);
symbolTable.relateToOperator("texture1DGradARB", EOpTextureGrad);
@@ -9323,7 +8565,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("allInvocations", EOpAllInvocations);
symbolTable.relateToOperator("allInvocationsEqual", EOpAllInvocationsEqual);
}
-#ifdef AMD_EXTENSIONS
symbolTable.relateToOperator("minInvocationsAMD", EOpMinInvocations);
symbolTable.relateToOperator("maxInvocationsAMD", EOpMaxInvocations);
symbolTable.relateToOperator("addInvocationsAMD", EOpAddInvocations);
@@ -9368,7 +8609,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("fragmentMaskFetchAMD", EOpFragmentMaskFetch);
symbolTable.relateToOperator("fragmentFetchAMD", EOpFragmentFetch);
-#endif
}
// GL_KHR_shader_subgroup
@@ -9429,7 +8669,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("subgroupQuadSwapVertical", EOpSubgroupQuadSwapVertical);
symbolTable.relateToOperator("subgroupQuadSwapDiagonal", EOpSubgroupQuadSwapDiagonal);
-#ifdef NV_EXTENSIONS
symbolTable.relateToOperator("subgroupPartitionNV", EOpSubgroupPartition);
symbolTable.relateToOperator("subgroupPartitionedAddNV", EOpSubgroupPartitionedAdd);
symbolTable.relateToOperator("subgroupPartitionedMulNV", EOpSubgroupPartitionedMul);
@@ -9452,7 +8691,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("subgroupPartitionedExclusiveAndNV", EOpSubgroupPartitionedExclusiveAnd);
symbolTable.relateToOperator("subgroupPartitionedExclusiveOrNV", EOpSubgroupPartitionedExclusiveOr);
symbolTable.relateToOperator("subgroupPartitionedExclusiveXorNV", EOpSubgroupPartitionedExclusiveXor);
-#endif
}
if (profile == EEsProfile) {
@@ -9477,9 +8715,6 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
break;
case EShLangFragment:
- symbolTable.relateToOperator("dFdx", EOpDPdx);
- symbolTable.relateToOperator("dFdy", EOpDPdy);
- symbolTable.relateToOperator("fwidth", EOpFwidth);
if (profile != EEsProfile && version >= 400) {
symbolTable.relateToOperator("dFdxFine", EOpDPdxFine);
symbolTable.relateToOperator("dFdyFine", EOpDPdyFine);
@@ -9492,17 +8727,16 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("interpolateAtSample", EOpInterpolateAtSample);
symbolTable.relateToOperator("interpolateAtOffset", EOpInterpolateAtOffset);
-#ifdef AMD_EXTENSIONS
if (profile != EEsProfile)
symbolTable.relateToOperator("interpolateAtVertexAMD", EOpInterpolateAtVertex);
-#endif
+
+ symbolTable.relateToOperator("beginInvocationInterlockARB", EOpBeginInvocationInterlock);
+ symbolTable.relateToOperator("endInvocationInterlockARB", EOpEndInvocationInterlock);
+
break;
case EShLangCompute:
- symbolTable.relateToOperator("memoryBarrierShared", EOpMemoryBarrierShared);
- symbolTable.relateToOperator("groupMemoryBarrier", EOpGroupMemoryBarrier);
symbolTable.relateToOperator("subgroupMemoryBarrierShared", EOpSubgroupMemoryBarrierShared);
-#ifdef NV_EXTENSIONS
if ((profile != EEsProfile && version >= 450) ||
(profile == EEsProfile && version >= 320)) {
symbolTable.relateToOperator("dFdx", EOpDPdx);
@@ -9515,13 +8749,11 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
symbolTable.relateToOperator("dFdyCoarse", EOpDPdyCoarse);
symbolTable.relateToOperator("fwidthCoarse",EOpFwidthCoarse);
}
-#endif
symbolTable.relateToOperator("coopMatLoadNV", EOpCooperativeMatrixLoad);
symbolTable.relateToOperator("coopMatStoreNV", EOpCooperativeMatrixStore);
symbolTable.relateToOperator("coopMatMulAddNV", EOpCooperativeMatrixMulAdd);
break;
-#ifdef NV_EXTENSIONS
case EShLangRayGenNV:
case EShLangClosestHitNV:
case EShLangMissNV:
@@ -9554,13 +8786,14 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
if ((profile != EEsProfile && version >= 450) || (profile == EEsProfile && version >= 320)) {
symbolTable.relateToOperator("memoryBarrierShared", EOpMemoryBarrierShared);
symbolTable.relateToOperator("groupMemoryBarrier", EOpGroupMemoryBarrier);
+ symbolTable.relateToOperator("subgroupMemoryBarrierShared", EOpSubgroupMemoryBarrierShared);
}
break;
-#endif
default:
assert(false && "Language not supported");
}
+#endif
}
//
@@ -9574,6 +8807,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
//
void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable, const TBuiltInResource &resources)
{
+#ifndef GLSLANG_WEB
if (profile != EEsProfile && version >= 430 && version < 440) {
symbolTable.setVariableExtensions("gl_MaxTransformFeedbackBuffers", 1, &E_GL_ARB_enhanced_layouts);
symbolTable.setVariableExtensions("gl_MaxTransformFeedbackInterleavedComponents", 1, &E_GL_ARB_enhanced_layouts);
@@ -9619,6 +8853,12 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
BuiltInVariable("gl_in", "gl_TexCoord", EbvTexCoord, symbolTable);
BuiltInVariable("gl_in", "gl_FogFragCoord", EbvFogFragCoord, symbolTable);
+ symbolTable.setVariableExtensions("gl_in", "gl_SecondaryPositionNV", 1, &E_GL_NV_stereo_view_rendering);
+ symbolTable.setVariableExtensions("gl_in", "gl_PositionPerViewNV", 1, &E_GL_NVX_multiview_per_view_attributes);
+
+ BuiltInVariable("gl_in", "gl_SecondaryPositionNV", EbvSecondaryPositionNV, symbolTable);
+ BuiltInVariable("gl_in", "gl_PositionPerViewNV", EbvPositionPerViewNV, symbolTable);
+
// extension requirements
if (profile == EEsProfile) {
symbolTable.setVariableExtensions("gl_in", "gl_PointSize", Num_AEP_tessellation_point_size, AEP_tessellation_point_size);
@@ -9629,6 +8869,7 @@ void TBuiltIns::identifyBuiltIns(int version, EProfile profile, const SpvVersion
default:
break;
}
+#endif
}
} // end namespace glslang
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Initialize.h b/thirdparty/glslang/glslang/MachineIndependent/Initialize.h
index b5de324233..ac8ec33e99 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Initialize.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/Initialize.h
@@ -91,6 +91,8 @@ public:
void identifyBuiltIns(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage language, TSymbolTable& symbolTable, const TBuiltInResource &resources);
protected:
+ void addTabledBuiltins(int version, EProfile profile, const SpvVersion& spvVersion);
+ void relateTabledBuiltins(int version, EProfile profile, const SpvVersion& spvVersion, EShLanguage, TSymbolTable&);
void add2ndGenerationSamplingImaging(int version, EProfile profile, const SpvVersion& spvVersion);
void addSubpassSampling(TSampler, const TString& typeName, int version, EProfile profile);
void addQueryFunctions(TSampler, const TString& typeName, int version, EProfile profile);
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp b/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp
index 584d880501..d0f86e6389 100644..100755
--- a/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Intermediate.cpp
@@ -123,12 +123,12 @@ TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIn
if ((op == EOpAdd || op == EOpSub) && extensionRequested(E_GL_EXT_buffer_reference2)) {
// No addressing math on struct with unsized array.
- if ((left->getBasicType() == EbtReference && left->getType().getReferentType()->containsUnsizedArray()) ||
- (right->getBasicType() == EbtReference && right->getType().getReferentType()->containsUnsizedArray())) {
+ if ((left->isReference() && left->getType().getReferentType()->containsUnsizedArray()) ||
+ (right->isReference() && right->getType().getReferentType()->containsUnsizedArray())) {
return nullptr;
}
- if (left->getBasicType() == EbtReference && isTypeInt(right->getBasicType())) {
+ if (left->isReference() && isTypeInt(right->getBasicType())) {
const TType& referenceType = left->getType();
TIntermConstantUnion* size = addConstantUnion((unsigned long long)computeBufferReferenceTypeSize(left->getType()), loc, true);
left = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, left, TType(EbtUint64));
@@ -141,7 +141,7 @@ TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIn
return node;
}
- if (op == EOpAdd && right->getBasicType() == EbtReference && isTypeInt(left->getBasicType())) {
+ if (op == EOpAdd && right->isReference() && isTypeInt(left->getBasicType())) {
const TType& referenceType = right->getType();
TIntermConstantUnion* size = addConstantUnion((unsigned long long)computeBufferReferenceTypeSize(right->getType()), loc, true);
right = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, right, TType(EbtUint64));
@@ -154,7 +154,7 @@ TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIn
return node;
}
- if (op == EOpSub && left->getBasicType() == EbtReference && right->getBasicType() == EbtReference) {
+ if (op == EOpSub && left->isReference() && right->isReference()) {
TIntermConstantUnion* size = addConstantUnion((long long)computeBufferReferenceTypeSize(left->getType()), loc, true);
left = addBuiltInFunctionCall(loc, EOpConvPtrToUint64, true, left, TType(EbtUint64));
@@ -170,7 +170,7 @@ TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIn
}
// No other math operators supported on references
- if (left->getBasicType() == EbtReference || right->getBasicType() == EbtReference) {
+ if (left->isReference() || right->isReference()) {
return nullptr;
}
}
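// Shape of the lowering built above for GL_EXT_buffer_reference2 (a
// sketch of the constructed expression, not literal glslang output):
// for `ref + n`, with size = computeBufferReferenceTypeSize(refType),
//
//   result = refType(uint64_t(ref) + uint64_t(n) * size);  // Ptr<->Uint64 round-trip
//
// `refA - refB` likewise subtracts the 64-bit addresses, with the same
// size constant scaling the result (C-like pointer arithmetic per the
// extension; the scaling detail is an assumption, as it falls outside
// this hunk). Referents containing an unsized array are rejected because
// their size cannot be computed.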
@@ -216,7 +216,7 @@ TIntermTyped* TIntermediate::addBinaryMath(TOperator op, TIntermTyped* left, TIn
node->getWritableType().getQualifier().makeSpecConstant();
// If must propagate nonuniform, make a nonuniform.
- if ((node->getLeft()->getQualifier().nonUniform || node->getRight()->getQualifier().nonUniform) &&
+ if ((node->getLeft()->getQualifier().isNonUniform() || node->getRight()->getQualifier().isNonUniform()) &&
isNonuniformPropagating(node->getOp()))
node->getWritableType().getQualifier().nonUniform = true;
@@ -290,7 +290,7 @@ TIntermTyped* TIntermediate::addAssign(TOperator op, TIntermTyped* left, TInterm
// Convert "reference += int" to "reference = reference + int". We need this because the
// "reference + int" calculation involves a cast back to the original type, which makes it
// not an lvalue.
- if ((op == EOpAddAssign || op == EOpSubAssign) && left->getBasicType() == EbtReference &&
+ if ((op == EOpAddAssign || op == EOpSubAssign) && left->isReference() &&
extensionRequested(E_GL_EXT_buffer_reference2)) {
if (!(right->getType().isScalar() && right->getType().isIntegerDomain()))
@@ -359,7 +359,7 @@ TIntermTyped* TIntermediate::addUnaryMath(TOperator op, TIntermTyped* child, TSo
switch (op) {
case EOpLogicalNot:
- if (source == EShSourceHlsl) {
+ if (getSource() == EShSourceHlsl) {
break; // HLSL can promote logical not
}
@@ -383,18 +383,20 @@ TIntermTyped* TIntermediate::addUnaryMath(TOperator op, TIntermTyped* child, TSo
//
TBasicType newType = EbtVoid;
switch (op) {
- case EOpConstructInt8: newType = EbtInt8; break;
- case EOpConstructUint8: newType = EbtUint8; break;
- case EOpConstructInt16: newType = EbtInt16; break;
- case EOpConstructUint16: newType = EbtUint16; break;
+ case EOpConstructBool: newType = EbtBool; break;
+ case EOpConstructFloat: newType = EbtFloat; break;
case EOpConstructInt: newType = EbtInt; break;
case EOpConstructUint: newType = EbtUint; break;
+#ifndef GLSLANG_WEB
+ case EOpConstructInt8: newType = EbtInt8; break;
+ case EOpConstructUint8: newType = EbtUint8; break;
+ case EOpConstructInt16: newType = EbtInt16; break;
+ case EOpConstructUint16: newType = EbtUint16; break;
case EOpConstructInt64: newType = EbtInt64; break;
case EOpConstructUint64: newType = EbtUint64; break;
- case EOpConstructBool: newType = EbtBool; break;
- case EOpConstructFloat: newType = EbtFloat; break;
case EOpConstructDouble: newType = EbtDouble; break;
case EOpConstructFloat16: newType = EbtFloat16; break;
+#endif
default: break; // some compilers want this
}
@@ -449,7 +451,7 @@ TIntermTyped* TIntermediate::addUnaryMath(TOperator op, TIntermTyped* child, TSo
node->getWritableType().getQualifier().makeSpecConstant();
// If must propagate nonuniform, make a nonuniform.
- if (node->getOperand()->getQualifier().nonUniform && isNonuniformPropagating(node->getOp()))
+ if (node->getOperand()->getQualifier().isNonUniform() && isNonuniformPropagating(node->getOp()))
node->getWritableType().getQualifier().nonUniform = true;
return node;
@@ -536,15 +538,13 @@ bool TIntermediate::isConversionAllowed(TOperator op, TIntermTyped* node) const
return false;
case EbtAtomicUint:
case EbtSampler:
-#ifdef NV_EXTENSIONS
case EbtAccStructNV:
-#endif
// opaque types can be passed to functions
if (op == EOpFunction)
break;
// HLSL can assign samplers directly (no constructor)
- if (source == EShSourceHlsl && node->getBasicType() == EbtSampler)
+ if (getSource() == EShSourceHlsl && node->getBasicType() == EbtSampler)
break;
// samplers can get assigned via a sampler constructor
@@ -562,107 +562,50 @@ bool TIntermediate::isConversionAllowed(TOperator op, TIntermTyped* node) const
return true;
}
-// This is 'mechanism' here, it does any conversion told.
-// It is about basic type, not about shape.
-// The policy comes from the shader or the calling code.
-TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped* node) const
+bool TIntermediate::buildConvertOp(TBasicType dst, TBasicType src, TOperator& newOp) const
{
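// Contract of the new helper, and a sketch of how the surviving
// createConversion() caller is expected to use it (the call site itself
// is outside this hunk):
//
//   TOperator newOp = EOpNull;
//   if (! buildConvertOp(convertTo, node->getBasicType(), newOp))
//       return nullptr;            // no conversion between these basic types
//   // ...then wrap the node in a unary op tagged with newOp, as before
//
// The helper is pure table lookup; policy checks (like the
// explicit-arithmetic-type gating deleted above) are expected to remain
// at the call site.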
- //
- // Add a new newNode for the conversion.
- //
- TIntermUnary* newNode = nullptr;
-
- TOperator newOp = EOpNull;
-
- // Certain explicit conversions are allowed conditionally
- bool arithemeticInt8Enabled = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
- extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int8);
-#ifdef AMD_EXTENSIONS
- bool arithemeticInt16Enabled = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
- extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16) ||
- extensionRequested(E_GL_AMD_gpu_shader_int16);
-
- bool arithemeticFloat16Enabled = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
- extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float16) ||
- extensionRequested(E_GL_AMD_gpu_shader_half_float);
-#else
- bool arithemeticInt16Enabled = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
- extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16);
-
- bool arithemeticFloat16Enabled = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
- extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float16);
-#endif
- bool convertToIntTypes = (convertTo == EbtInt8 || convertTo == EbtUint8 ||
- convertTo == EbtInt16 || convertTo == EbtUint16 ||
- convertTo == EbtInt || convertTo == EbtUint ||
- convertTo == EbtInt64 || convertTo == EbtUint64);
-
- bool convertFromIntTypes = (node->getBasicType() == EbtInt8 || node->getBasicType() == EbtUint8 ||
- node->getBasicType() == EbtInt16 || node->getBasicType() == EbtUint16 ||
- node->getBasicType() == EbtInt || node->getBasicType() == EbtUint ||
- node->getBasicType() == EbtInt64 || node->getBasicType() == EbtUint64);
-
- bool convertToFloatTypes = (convertTo == EbtFloat16 || convertTo == EbtFloat || convertTo == EbtDouble);
-
- bool convertFromFloatTypes = (node->getBasicType() == EbtFloat16 ||
- node->getBasicType() == EbtFloat ||
- node->getBasicType() == EbtDouble);
-
- if (! arithemeticInt8Enabled) {
- if (((convertTo == EbtInt8 || convertTo == EbtUint8) && ! convertFromIntTypes) ||
- ((node->getBasicType() == EbtInt8 || node->getBasicType() == EbtUint8) && ! convertToIntTypes))
- return nullptr;
- }
-
- if (! arithemeticInt16Enabled) {
- if (((convertTo == EbtInt16 || convertTo == EbtUint16) && ! convertFromIntTypes) ||
- ((node->getBasicType() == EbtInt16 || node->getBasicType() == EbtUint16) && ! convertToIntTypes))
- return nullptr;
- }
-
- if (! arithemeticFloat16Enabled) {
- if ((convertTo == EbtFloat16 && ! convertFromFloatTypes) ||
- (node->getBasicType() == EbtFloat16 && ! convertToFloatTypes))
- return nullptr;
- }
-
- switch (convertTo) {
+ switch (dst) {
+#ifndef GLSLANG_WEB
case EbtDouble:
- switch (node->getBasicType()) {
+ switch (src) {
+ case EbtUint: newOp = EOpConvUintToDouble; break;
+ case EbtBool: newOp = EOpConvBoolToDouble; break;
+ case EbtFloat: newOp = EOpConvFloatToDouble; break;
+ case EbtInt: newOp = EOpConvIntToDouble; break;
case EbtInt8: newOp = EOpConvInt8ToDouble; break;
case EbtUint8: newOp = EOpConvUint8ToDouble; break;
case EbtInt16: newOp = EOpConvInt16ToDouble; break;
case EbtUint16: newOp = EOpConvUint16ToDouble; break;
- case EbtInt: newOp = EOpConvIntToDouble; break;
- case EbtUint: newOp = EOpConvUintToDouble; break;
- case EbtBool: newOp = EOpConvBoolToDouble; break;
- case EbtFloat: newOp = EOpConvFloatToDouble; break;
case EbtFloat16: newOp = EOpConvFloat16ToDouble; break;
case EbtInt64: newOp = EOpConvInt64ToDouble; break;
case EbtUint64: newOp = EOpConvUint64ToDouble; break;
default:
- return nullptr;
+ return false;
}
break;
+#endif
case EbtFloat:
- switch (node->getBasicType()) {
- case EbtInt8: newOp = EOpConvInt8ToFloat; break;
- case EbtUint8: newOp = EOpConvUint8ToFloat; break;
- case EbtInt16: newOp = EOpConvInt16ToFloat; break;
- case EbtUint16: newOp = EOpConvUint16ToFloat; break;
+ switch (src) {
case EbtInt: newOp = EOpConvIntToFloat; break;
case EbtUint: newOp = EOpConvUintToFloat; break;
case EbtBool: newOp = EOpConvBoolToFloat; break;
+#ifndef GLSLANG_WEB
case EbtDouble: newOp = EOpConvDoubleToFloat; break;
+ case EbtInt8: newOp = EOpConvInt8ToFloat; break;
+ case EbtUint8: newOp = EOpConvUint8ToFloat; break;
+ case EbtInt16: newOp = EOpConvInt16ToFloat; break;
+ case EbtUint16: newOp = EOpConvUint16ToFloat; break;
case EbtFloat16: newOp = EOpConvFloat16ToFloat; break;
case EbtInt64: newOp = EOpConvInt64ToFloat; break;
case EbtUint64: newOp = EOpConvUint64ToFloat; break;
+#endif
default:
- return nullptr;
+ return false;
}
break;
+#ifndef GLSLANG_WEB
case EbtFloat16:
- switch (node->getBasicType()) {
+ switch (src) {
case EbtInt8: newOp = EOpConvInt8ToFloat16; break;
case EbtUint8: newOp = EOpConvUint8ToFloat16; break;
case EbtInt16: newOp = EOpConvInt16ToFloat16; break;
@@ -675,28 +618,32 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped
case EbtInt64: newOp = EOpConvInt64ToFloat16; break;
case EbtUint64: newOp = EOpConvUint64ToFloat16; break;
default:
- return nullptr;
+ return false;
}
break;
+#endif
case EbtBool:
- switch (node->getBasicType()) {
- case EbtInt8: newOp = EOpConvInt8ToBool; break;
- case EbtUint8: newOp = EOpConvUint8ToBool; break;
- case EbtInt16: newOp = EOpConvInt16ToBool; break;
- case EbtUint16: newOp = EOpConvUint16ToBool; break;
+ switch (src) {
case EbtInt: newOp = EOpConvIntToBool; break;
case EbtUint: newOp = EOpConvUintToBool; break;
case EbtFloat: newOp = EOpConvFloatToBool; break;
+#ifndef GLSLANG_WEB
case EbtDouble: newOp = EOpConvDoubleToBool; break;
+ case EbtInt8: newOp = EOpConvInt8ToBool; break;
+ case EbtUint8: newOp = EOpConvUint8ToBool; break;
+ case EbtInt16: newOp = EOpConvInt16ToBool; break;
+ case EbtUint16: newOp = EOpConvUint16ToBool; break;
case EbtFloat16: newOp = EOpConvFloat16ToBool; break;
case EbtInt64: newOp = EOpConvInt64ToBool; break;
case EbtUint64: newOp = EOpConvUint64ToBool; break;
+#endif
default:
- return nullptr;
+ return false;
}
break;
+#ifndef GLSLANG_WEB
case EbtInt8:
- switch (node->getBasicType()) {
+ switch (src) {
case EbtUint8: newOp = EOpConvUint8ToInt8; break;
case EbtInt16: newOp = EOpConvInt16ToInt8; break;
case EbtUint16: newOp = EOpConvUint16ToInt8; break;
@@ -709,11 +656,11 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped
case EbtDouble: newOp = EOpConvDoubleToInt8; break;
case EbtFloat16: newOp = EOpConvFloat16ToInt8; break;
default:
- return nullptr;
+ return false;
}
break;
case EbtUint8:
- switch (node->getBasicType()) {
+ switch (src) {
case EbtInt8: newOp = EOpConvInt8ToUint8; break;
case EbtInt16: newOp = EOpConvInt16ToUint8; break;
case EbtUint16: newOp = EOpConvUint16ToUint8; break;
@@ -726,12 +673,12 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped
case EbtDouble: newOp = EOpConvDoubleToUint8; break;
case EbtFloat16: newOp = EOpConvFloat16ToUint8; break;
default:
- return nullptr;
+ return false;
}
break;
case EbtInt16:
- switch (node->getBasicType()) {
+ switch (src) {
case EbtUint8: newOp = EOpConvUint8ToInt16; break;
case EbtInt8: newOp = EOpConvInt8ToInt16; break;
case EbtUint16: newOp = EOpConvUint16ToInt16; break;
@@ -744,11 +691,11 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped
case EbtDouble: newOp = EOpConvDoubleToInt16; break;
case EbtFloat16: newOp = EOpConvFloat16ToInt16; break;
default:
- return nullptr;
+ return false;
}
break;
case EbtUint16:
- switch (node->getBasicType()) {
+ switch (src) {
case EbtInt8: newOp = EOpConvInt8ToUint16; break;
case EbtUint8: newOp = EOpConvUint8ToUint16; break;
case EbtInt16: newOp = EOpConvInt16ToUint16; break;
@@ -761,46 +708,52 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped
case EbtDouble: newOp = EOpConvDoubleToUint16; break;
case EbtFloat16: newOp = EOpConvFloat16ToUint16; break;
default:
- return nullptr;
+ return false;
}
break;
+#endif
case EbtInt:
- switch (node->getBasicType()) {
+ switch (src) {
+ case EbtUint: newOp = EOpConvUintToInt; break;
+ case EbtBool: newOp = EOpConvBoolToInt; break;
+ case EbtFloat: newOp = EOpConvFloatToInt; break;
+#ifndef GLSLANG_WEB
case EbtInt8: newOp = EOpConvInt8ToInt; break;
case EbtUint8: newOp = EOpConvUint8ToInt; break;
case EbtInt16: newOp = EOpConvInt16ToInt; break;
case EbtUint16: newOp = EOpConvUint16ToInt; break;
- case EbtUint: newOp = EOpConvUintToInt; break;
- case EbtBool: newOp = EOpConvBoolToInt; break;
- case EbtFloat: newOp = EOpConvFloatToInt; break;
case EbtDouble: newOp = EOpConvDoubleToInt; break;
case EbtFloat16: newOp = EOpConvFloat16ToInt; break;
case EbtInt64: newOp = EOpConvInt64ToInt; break;
case EbtUint64: newOp = EOpConvUint64ToInt; break;
+#endif
default:
- return nullptr;
+ return false;
}
break;
case EbtUint:
- switch (node->getBasicType()) {
+ switch (src) {
+ case EbtInt: newOp = EOpConvIntToUint; break;
+ case EbtBool: newOp = EOpConvBoolToUint; break;
+ case EbtFloat: newOp = EOpConvFloatToUint; break;
+#ifndef GLSLANG_WEB
case EbtInt8: newOp = EOpConvInt8ToUint; break;
case EbtUint8: newOp = EOpConvUint8ToUint; break;
case EbtInt16: newOp = EOpConvInt16ToUint; break;
case EbtUint16: newOp = EOpConvUint16ToUint; break;
- case EbtInt: newOp = EOpConvIntToUint; break;
- case EbtBool: newOp = EOpConvBoolToUint; break;
- case EbtFloat: newOp = EOpConvFloatToUint; break;
case EbtDouble: newOp = EOpConvDoubleToUint; break;
case EbtFloat16: newOp = EOpConvFloat16ToUint; break;
case EbtInt64: newOp = EOpConvInt64ToUint; break;
case EbtUint64: newOp = EOpConvUint64ToUint; break;
+#endif
default:
- return nullptr;
+ return false;
}
break;
+#ifndef GLSLANG_WEB
case EbtInt64:
- switch (node->getBasicType()) {
+ switch (src) {
case EbtInt8: newOp = EOpConvInt8ToInt64; break;
case EbtUint8: newOp = EOpConvUint8ToInt64; break;
case EbtInt16: newOp = EOpConvInt16ToInt64; break;
@@ -813,11 +766,11 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped
case EbtFloat16: newOp = EOpConvFloat16ToInt64; break;
case EbtUint64: newOp = EOpConvUint64ToInt64; break;
default:
- return nullptr;
+ return false;
}
break;
case EbtUint64:
- switch (node->getBasicType()) {
+ switch (src) {
case EbtInt8: newOp = EOpConvInt8ToUint64; break;
case EbtUint8: newOp = EOpConvUint8ToUint64; break;
case EbtInt16: newOp = EOpConvInt16ToUint64; break;
@@ -830,10 +783,64 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped
case EbtFloat16: newOp = EOpConvFloat16ToUint64; break;
case EbtInt64: newOp = EOpConvInt64ToUint64; break;
default:
- return nullptr;
+ return false;
}
break;
+#endif
default:
+ return false;
+ }
+ return true;
+}
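// Editor's note, not part of the upstream patch: buildConvertOp is the lookup
// table split out of createConversion. It maps a (dst, src) basic-type pair to
// the matching EOpConv* opcode and returns false when no direct conversion
// exists. A minimal usage sketch, assuming the member signature
// bool buildConvertOp(TBasicType dst, TBasicType src, TOperator& newOp) const:
//
//   TOperator op = EOpNull;
//   if (buildConvertOp(EbtFloat, EbtInt, op))
//       assert(op == EOpConvIntToFloat);  // direct int -> float opcode
//   if (!buildConvertOp(EbtFloat, EbtSampler, op))
//       {} // no such conversion; createConversion returns nullptr in this case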
+
+// This is the 'mechanism' here: it performs whatever conversion it is told to.
+// It deals with basic type only, not with shape.
+// The policy of which conversions to request comes from the shader or the calling code.
+TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped* node) const
+{
+    //
+    // Add a new node ('newNode') for the conversion.
+    //
+
+#ifndef GLSLANG_WEB
+ bool convertToIntTypes = (convertTo == EbtInt8 || convertTo == EbtUint8 ||
+ convertTo == EbtInt16 || convertTo == EbtUint16 ||
+ convertTo == EbtInt || convertTo == EbtUint ||
+ convertTo == EbtInt64 || convertTo == EbtUint64);
+
+ bool convertFromIntTypes = (node->getBasicType() == EbtInt8 || node->getBasicType() == EbtUint8 ||
+ node->getBasicType() == EbtInt16 || node->getBasicType() == EbtUint16 ||
+ node->getBasicType() == EbtInt || node->getBasicType() == EbtUint ||
+ node->getBasicType() == EbtInt64 || node->getBasicType() == EbtUint64);
+
+ bool convertToFloatTypes = (convertTo == EbtFloat16 || convertTo == EbtFloat || convertTo == EbtDouble);
+
+ bool convertFromFloatTypes = (node->getBasicType() == EbtFloat16 ||
+ node->getBasicType() == EbtFloat ||
+ node->getBasicType() == EbtDouble);
+
+ if (! getArithemeticInt8Enabled()) {
+ if (((convertTo == EbtInt8 || convertTo == EbtUint8) && ! convertFromIntTypes) ||
+ ((node->getBasicType() == EbtInt8 || node->getBasicType() == EbtUint8) && ! convertToIntTypes))
+ return nullptr;
+ }
+
+ if (! getArithemeticInt16Enabled()) {
+ if (((convertTo == EbtInt16 || convertTo == EbtUint16) && ! convertFromIntTypes) ||
+ ((node->getBasicType() == EbtInt16 || node->getBasicType() == EbtUint16) && ! convertToIntTypes))
+ return nullptr;
+ }
+
+ if (! getArithemeticFloat16Enabled()) {
+ if ((convertTo == EbtFloat16 && ! convertFromFloatTypes) ||
+ (node->getBasicType() == EbtFloat16 && ! convertToFloatTypes))
+ return nullptr;
+ }
+#endif
+
+ TIntermUnary* newNode = nullptr;
+ TOperator newOp = EOpNull;
+ if (!buildConvertOp(convertTo, node->getBasicType(), newOp)) {
return nullptr;
}
@@ -841,9 +848,18 @@ TIntermTyped* TIntermediate::createConversion(TBasicType convertTo, TIntermTyped
newNode = addUnaryNode(newOp, node, node->getLoc(), newType);
if (node->getAsConstantUnion()) {
- TIntermTyped* folded = node->getAsConstantUnion()->fold(newOp, newType);
- if (folded)
- return folded;
+#ifndef GLSLANG_WEB
+ // 8/16-bit storage extensions don't support 8/16-bit constants, so don't fold conversions
+ // to those types
+ if ((getArithemeticInt8Enabled() || !(convertTo == EbtInt8 || convertTo == EbtUint8)) &&
+ (getArithemeticInt16Enabled() || !(convertTo == EbtInt16 || convertTo == EbtUint16)) &&
+ (getArithemeticFloat16Enabled() || !(convertTo == EbtFloat16)))
+#endif
+ {
+ TIntermTyped* folded = node->getAsConstantUnion()->fold(newOp, newType);
+ if (folded)
+ return folded;
+ }
}
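// Editor's note (annotation, not upstream code): the fold guard above exists
// because the 8/16-bit *storage* extensions allow those types in buffers but
// not as constants or arithmetic operands. E.g. with only
// GL_EXT_shader_16bit_storage enabled, float16_t(x) has to survive as a
// conversion node instead of folding to a 16-bit constant.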
// Propagate specialization-constant-ness, if allowed
@@ -933,7 +949,7 @@ TIntermediate::addConversion(TOperator op, TIntermTyped* node0, TIntermTyped* no
case EOpLogicalAnd:
case EOpLogicalOr:
case EOpLogicalXor:
- if (source == EShSourceHlsl)
+ if (getSource() == EShSourceHlsl)
promoteTo = std::make_tuple(EbtBool, EbtBool);
else
return std::make_tuple(node0, node1);
@@ -944,7 +960,7 @@ TIntermediate::addConversion(TOperator op, TIntermTyped* node0, TIntermTyped* no
// HLSL can promote bools to ints to make this work.
case EOpLeftShift:
case EOpRightShift:
- if (source == EShSourceHlsl) {
+ if (getSource() == EShSourceHlsl) {
TBasicType node0BasicType = node0->getBasicType();
if (node0BasicType == EbtBool)
node0BasicType = EbtInt;
@@ -1039,6 +1055,13 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt
case EOpConstructFloat:
promoteTo = EbtFloat;
break;
+ case EOpConstructInt:
+ promoteTo = EbtInt;
+ break;
+ case EOpConstructUint:
+ promoteTo = EbtUint;
+ break;
+#ifndef GLSLANG_WEB
case EOpConstructDouble:
promoteTo = EbtDouble;
break;
@@ -1067,18 +1090,13 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt
canPromoteConstant = extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16);
break;
- case EOpConstructInt:
- promoteTo = EbtInt;
- break;
- case EOpConstructUint:
- promoteTo = EbtUint;
- break;
case EOpConstructInt64:
promoteTo = EbtInt64;
break;
case EOpConstructUint64:
promoteTo = EbtUint64;
break;
+#endif
case EOpLogicalNot:
@@ -1111,6 +1129,7 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt
case EOpLit:
case EOpMax:
case EOpMin:
+ case EOpMod:
case EOpModf:
case EOpPow:
case EOpReflect:
@@ -1122,7 +1141,7 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt
case EOpConstructStruct:
case EOpConstructCooperativeMatrix:
- if (type.getBasicType() == EbtReference || node->getType().getBasicType() == EbtReference) {
+ if (type.isReference() || node->getType().isReference()) {
// types must match to assign a reference
if (type == node->getType())
return node;
@@ -1145,7 +1164,7 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt
case EOpLeftShiftAssign:
case EOpRightShiftAssign:
{
- if (source == EShSourceHlsl && node->getType().getBasicType() == EbtBool)
+ if (getSource() == EShSourceHlsl && node->getType().getBasicType() == EbtBool)
promoteTo = type.getBasicType();
else {
if (isTypeInt(type.getBasicType()) && isTypeInt(node->getBasicType()))
@@ -1191,7 +1210,7 @@ TIntermTyped* TIntermediate::addConversion(TOperator op, const TType& type, TInt
TIntermTyped* TIntermediate::addUniShapeConversion(TOperator op, const TType& type, TIntermTyped* node)
{
// some source languages don't do this
- switch (source) {
+ switch (getSource()) {
case EShSourceHlsl:
break;
case EShSourceGlsl:
@@ -1244,7 +1263,7 @@ TIntermTyped* TIntermediate::addUniShapeConversion(TOperator op, const TType& ty
void TIntermediate::addBiShapeConversion(TOperator op, TIntermTyped*& lhsNode, TIntermTyped*& rhsNode)
{
// some source languages don't do this
- switch (source) {
+ switch (getSource()) {
case EShSourceHlsl:
break;
case EShSourceGlsl:
@@ -1347,7 +1366,7 @@ TIntermTyped* TIntermediate::addShapeConversion(const TType& type, TIntermTyped*
// The new node that handles the conversion
TOperator constructorOp = mapTypeToConstructorOp(type);
- if (source == EShSourceHlsl) {
+ if (getSource() == EShSourceHlsl) {
// HLSL rules for scalar, vector and matrix conversions:
// 1) scalar can become anything, initializing every component with its value
// 2) vector and matrix can become scalar, first element is used (warning: truncation)
@@ -1460,13 +1479,15 @@ bool TIntermediate::isFPPromotion(TBasicType from, TBasicType to) const
bool TIntermediate::isIntegralConversion(TBasicType from, TBasicType to) const
{
+#ifdef GLSLANG_WEB
+ return false;
+#endif
+
switch (from) {
- case EbtInt8:
- switch (to) {
- case EbtUint8:
- case EbtInt16:
- case EbtUint16:
+ case EbtInt:
+ switch(to) {
case EbtUint:
+ return version >= 400 || getSource() == EShSourceHlsl;
case EbtInt64:
case EbtUint64:
return true;
@@ -1474,11 +1495,8 @@ bool TIntermediate::isIntegralConversion(TBasicType from, TBasicType to) const
break;
}
break;
- case EbtUint8:
- switch (to) {
- case EbtInt16:
- case EbtUint16:
- case EbtUint:
+ case EbtUint:
+ switch(to) {
case EbtInt64:
case EbtUint64:
return true;
@@ -1486,8 +1504,10 @@ bool TIntermediate::isIntegralConversion(TBasicType from, TBasicType to) const
break;
}
break;
- case EbtInt16:
- switch(to) {
+ case EbtInt8:
+ switch (to) {
+ case EbtUint8:
+ case EbtInt16:
case EbtUint16:
case EbtUint:
case EbtInt64:
@@ -1497,8 +1517,10 @@ bool TIntermediate::isIntegralConversion(TBasicType from, TBasicType to) const
break;
}
break;
- case EbtUint16:
- switch(to) {
+ case EbtUint8:
+ switch (to) {
+ case EbtInt16:
+ case EbtUint16:
case EbtUint:
case EbtInt64:
case EbtUint64:
@@ -1507,10 +1529,10 @@ bool TIntermediate::isIntegralConversion(TBasicType from, TBasicType to) const
break;
}
break;
- case EbtInt:
+ case EbtInt16:
switch(to) {
+ case EbtUint16:
case EbtUint:
- return version >= 400 || (source == EShSourceHlsl);
case EbtInt64:
case EbtUint64:
return true;
@@ -1518,8 +1540,9 @@ bool TIntermediate::isIntegralConversion(TBasicType from, TBasicType to) const
break;
}
break;
- case EbtUint:
+ case EbtUint16:
switch(to) {
+ case EbtUint:
case EbtInt64:
case EbtUint64:
return true;
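// Editor's note: the reshuffle above is behavior-preserving. Web builds
// short-circuit to false at the top of the function; for full builds the
// 32-bit int/uint cases now come first. The only version-gated rule is
// int -> uint, which is an implicit conversion only from GLSL 4.00 on (or
// under HLSL): e.g. `uint u = 1;` is accepted with `#version 400` but needs
// `1u` in `#version 330`.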
@@ -1540,6 +1563,10 @@ bool TIntermediate::isIntegralConversion(TBasicType from, TBasicType to) const
bool TIntermediate::isFPConversion(TBasicType from, TBasicType to) const
{
+#ifdef GLSLANG_WEB
+ return false;
+#endif
+
if (to == EbtFloat && from == EbtFloat16) {
return true;
} else {
@@ -1550,12 +1577,9 @@ bool TIntermediate::isFPConversion(TBasicType from, TBasicType to) const
bool TIntermediate::isFPIntegralConversion(TBasicType from, TBasicType to) const
{
switch (from) {
- case EbtInt8:
- case EbtUint8:
- case EbtInt16:
- case EbtUint16:
- switch (to) {
- case EbtFloat16:
+ case EbtInt:
+ case EbtUint:
+ switch(to) {
case EbtFloat:
case EbtDouble:
return true;
@@ -1563,9 +1587,13 @@ bool TIntermediate::isFPIntegralConversion(TBasicType from, TBasicType to) const
break;
}
break;
- case EbtInt:
- case EbtUint:
- switch(to) {
+#ifndef GLSLANG_WEB
+ case EbtInt8:
+ case EbtUint8:
+ case EbtInt16:
+ case EbtUint16:
+ switch (to) {
+ case EbtFloat16:
case EbtFloat:
case EbtDouble:
return true;
@@ -1579,7 +1607,7 @@ bool TIntermediate::isFPIntegralConversion(TBasicType from, TBasicType to) const
return true;
}
break;
-
+#endif
default:
break;
}
@@ -1592,7 +1620,7 @@ bool TIntermediate::isFPIntegralConversion(TBasicType from, TBasicType to) const
//
bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperator op) const
{
- if (profile == EEsProfile || version == 110)
+ if (isEsProfile() || version == 110)
return false;
if (from == to)
@@ -1600,7 +1628,7 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
// TODO: Move more policies into language-specific handlers.
// Some languages allow more general (or potentially, more specific) conversions under some conditions.
- if (source == EShSourceHlsl) {
+ if (getSource() == EShSourceHlsl) {
const bool fromConvertable = (from == EbtFloat || from == EbtDouble || from == EbtInt || from == EbtUint || from == EbtBool);
const bool toConvertable = (to == EbtFloat || to == EbtDouble || to == EbtInt || to == EbtUint || to == EbtBool);
@@ -1667,7 +1695,7 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
}
// hlsl supported conversions
- if (source == EShSourceHlsl) {
+ if (getSource() == EShSourceHlsl) {
if (from == EbtBool && (to == EbtInt || to == EbtUint || to == EbtFloat))
return true;
}
@@ -1682,13 +1710,11 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
case EbtFloat:
case EbtDouble:
return true;
-#ifdef AMD_EXTENSIONS
case EbtInt16:
case EbtUint16:
return extensionRequested(E_GL_AMD_gpu_shader_int16);
case EbtFloat16:
return extensionRequested(E_GL_AMD_gpu_shader_half_float);
-#endif
default:
return false;
}
@@ -1699,34 +1725,28 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
case EbtFloat:
return true;
case EbtBool:
- return (source == EShSourceHlsl);
-#ifdef AMD_EXTENSIONS
+ return getSource() == EShSourceHlsl;
case EbtInt16:
case EbtUint16:
return extensionRequested(E_GL_AMD_gpu_shader_int16);
-#endif
case EbtFloat16:
return
-#ifdef AMD_EXTENSIONS
extensionRequested(E_GL_AMD_gpu_shader_half_float) ||
-#endif
- (source == EShSourceHlsl);
+ getSource() == EShSourceHlsl;
default:
return false;
}
case EbtUint:
switch (from) {
case EbtInt:
- return version >= 400 || (source == EShSourceHlsl);
+ return version >= 400 || getSource() == EShSourceHlsl;
case EbtUint:
return true;
case EbtBool:
- return (source == EShSourceHlsl);
-#ifdef AMD_EXTENSIONS
+ return getSource() == EShSourceHlsl;
case EbtInt16:
case EbtUint16:
return extensionRequested(E_GL_AMD_gpu_shader_int16);
-#endif
default:
return false;
}
@@ -1735,11 +1755,9 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
case EbtInt:
return true;
case EbtBool:
- return (source == EShSourceHlsl);
-#ifdef AMD_EXTENSIONS
+ return getSource() == EShSourceHlsl;
case EbtInt16:
return extensionRequested(E_GL_AMD_gpu_shader_int16);
-#endif
default:
return false;
}
@@ -1750,11 +1768,9 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
case EbtInt64:
case EbtUint64:
return true;
-#ifdef AMD_EXTENSIONS
case EbtInt16:
case EbtUint16:
return extensionRequested(E_GL_AMD_gpu_shader_int16);
-#endif
default:
return false;
}
@@ -1763,15 +1779,12 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
case EbtInt:
case EbtInt64:
return true;
-#ifdef AMD_EXTENSIONS
case EbtInt16:
return extensionRequested(E_GL_AMD_gpu_shader_int16);
-#endif
default:
return false;
}
case EbtFloat16:
-#ifdef AMD_EXTENSIONS
switch (from) {
case EbtInt16:
case EbtUint16:
@@ -1781,10 +1794,8 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
default:
break;
}
-#endif
return false;
case EbtUint16:
-#ifdef AMD_EXTENSIONS
switch (from) {
case EbtInt16:
case EbtUint16:
@@ -1792,7 +1803,6 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
default:
break;
}
-#endif
return false;
default:
return false;
@@ -1802,7 +1812,12 @@ bool TIntermediate::canImplicitlyPromote(TBasicType from, TBasicType to, TOperat
return false;
}
-static bool canSignedIntTypeRepresentAllUnsignedValues(TBasicType sintType, TBasicType uintType) {
+static bool canSignedIntTypeRepresentAllUnsignedValues(TBasicType sintType, TBasicType uintType)
+{
+#ifdef GLSLANG_WEB
+ return false;
+#endif
+
switch(sintType) {
case EbtInt8:
switch(uintType) {
@@ -1861,7 +1876,13 @@ static bool canSignedIntTypeRepresentAllUnsignedValues(TBasicType sintType, TBas
}
-static TBasicType getCorrespondingUnsignedType(TBasicType type) {
+static TBasicType getCorrespondingUnsignedType(TBasicType type)
+{
+#ifdef GLSLANG_WEB
+ assert(type == EbtInt);
+ return EbtUint;
+#endif
+
switch(type) {
case EbtInt8:
return EbtUint8;
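// Editor's note, a worked example for the two helpers above: int16 can hold
// every uint8 value (0..255 fits in -32768..32767), so
// canSignedIntTypeRepresentAllUnsignedValues(EbtInt16, EbtUint8) is true,
// while (EbtInt16, EbtUint16) is false because 65535 overflows int16.
// getCorrespondingUnsignedType then pairs each signed type with its
// same-width unsigned type (EbtInt -> EbtUint, etc.); the web build only ever
// sees 32-bit int, hence its assert-and-return shortcut.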
@@ -1910,10 +1931,10 @@ std::tuple<TBasicType, TBasicType> TIntermediate::getConversionDestinatonType(TB
TBasicType res0 = EbtNumTypes;
TBasicType res1 = EbtNumTypes;
- if (profile == EEsProfile || version == 110)
- return std::make_tuple(res0, res1);;
+ if (isEsProfile() || version == 110)
+ return std::make_tuple(res0, res1);
- if (source == EShSourceHlsl) {
+ if (getSource() == EShSourceHlsl) {
if (canImplicitlyPromote(type1, type0, op)) {
res0 = type0;
res1 = type0;
@@ -1982,7 +2003,7 @@ TOperator TIntermediate::mapTypeToConstructorOp(const TType& type) const
{
TOperator op = EOpNull;
- if (type.getQualifier().nonUniform)
+ if (type.getQualifier().isNonUniform())
return EOpConstructNonuniform;
if (type.isCoopMat())
@@ -1993,7 +2014,7 @@ TOperator TIntermediate::mapTypeToConstructorOp(const TType& type) const
op = EOpConstructStruct;
break;
case EbtSampler:
- if (type.getSampler().combined)
+ if (type.getSampler().isCombined())
op = EOpConstructTextureSampler;
break;
case EbtFloat:
@@ -2035,6 +2056,121 @@ TOperator TIntermediate::mapTypeToConstructorOp(const TType& type) const
}
}
break;
+ case EbtInt:
+ if (type.getMatrixCols()) {
+ switch (type.getMatrixCols()) {
+ case 2:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructIMat2x2; break;
+ case 3: op = EOpConstructIMat2x3; break;
+ case 4: op = EOpConstructIMat2x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ case 3:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructIMat3x2; break;
+ case 3: op = EOpConstructIMat3x3; break;
+ case 4: op = EOpConstructIMat3x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ case 4:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructIMat4x2; break;
+ case 3: op = EOpConstructIMat4x3; break;
+ case 4: op = EOpConstructIMat4x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ }
+ } else {
+ switch(type.getVectorSize()) {
+ case 1: op = EOpConstructInt; break;
+ case 2: op = EOpConstructIVec2; break;
+ case 3: op = EOpConstructIVec3; break;
+ case 4: op = EOpConstructIVec4; break;
+ default: break; // some compilers want this
+ }
+ }
+ break;
+ case EbtUint:
+ if (type.getMatrixCols()) {
+ switch (type.getMatrixCols()) {
+ case 2:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructUMat2x2; break;
+ case 3: op = EOpConstructUMat2x3; break;
+ case 4: op = EOpConstructUMat2x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ case 3:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructUMat3x2; break;
+ case 3: op = EOpConstructUMat3x3; break;
+ case 4: op = EOpConstructUMat3x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ case 4:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructUMat4x2; break;
+ case 3: op = EOpConstructUMat4x3; break;
+ case 4: op = EOpConstructUMat4x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ }
+ } else {
+ switch(type.getVectorSize()) {
+ case 1: op = EOpConstructUint; break;
+ case 2: op = EOpConstructUVec2; break;
+ case 3: op = EOpConstructUVec3; break;
+ case 4: op = EOpConstructUVec4; break;
+ default: break; // some compilers want this
+ }
+ }
+ break;
+ case EbtBool:
+ if (type.getMatrixCols()) {
+ switch (type.getMatrixCols()) {
+ case 2:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructBMat2x2; break;
+ case 3: op = EOpConstructBMat2x3; break;
+ case 4: op = EOpConstructBMat2x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ case 3:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructBMat3x2; break;
+ case 3: op = EOpConstructBMat3x3; break;
+ case 4: op = EOpConstructBMat3x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ case 4:
+ switch (type.getMatrixRows()) {
+ case 2: op = EOpConstructBMat4x2; break;
+ case 3: op = EOpConstructBMat4x3; break;
+ case 4: op = EOpConstructBMat4x4; break;
+ default: break; // some compilers want this
+ }
+ break;
+ }
+ } else {
+ switch(type.getVectorSize()) {
+ case 1: op = EOpConstructBool; break;
+ case 2: op = EOpConstructBVec2; break;
+ case 3: op = EOpConstructBVec3; break;
+ case 4: op = EOpConstructBVec4; break;
+ default: break; // some compilers want this
+ }
+ }
+ break;
+#ifndef GLSLANG_WEB
case EbtDouble:
if (type.getMatrixCols()) {
switch (type.getMatrixCols()) {
@@ -2148,82 +2284,6 @@ TOperator TIntermediate::mapTypeToConstructorOp(const TType& type) const
default: break; // some compilers want this
}
break;
- case EbtInt:
- if (type.getMatrixCols()) {
- switch (type.getMatrixCols()) {
- case 2:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructIMat2x2; break;
- case 3: op = EOpConstructIMat2x3; break;
- case 4: op = EOpConstructIMat2x4; break;
- default: break; // some compilers want this
- }
- break;
- case 3:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructIMat3x2; break;
- case 3: op = EOpConstructIMat3x3; break;
- case 4: op = EOpConstructIMat3x4; break;
- default: break; // some compilers want this
- }
- break;
- case 4:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructIMat4x2; break;
- case 3: op = EOpConstructIMat4x3; break;
- case 4: op = EOpConstructIMat4x4; break;
- default: break; // some compilers want this
- }
- break;
- }
- } else {
- switch(type.getVectorSize()) {
- case 1: op = EOpConstructInt; break;
- case 2: op = EOpConstructIVec2; break;
- case 3: op = EOpConstructIVec3; break;
- case 4: op = EOpConstructIVec4; break;
- default: break; // some compilers want this
- }
- }
- break;
- case EbtUint:
- if (type.getMatrixCols()) {
- switch (type.getMatrixCols()) {
- case 2:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructUMat2x2; break;
- case 3: op = EOpConstructUMat2x3; break;
- case 4: op = EOpConstructUMat2x4; break;
- default: break; // some compilers want this
- }
- break;
- case 3:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructUMat3x2; break;
- case 3: op = EOpConstructUMat3x3; break;
- case 4: op = EOpConstructUMat3x4; break;
- default: break; // some compilers want this
- }
- break;
- case 4:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructUMat4x2; break;
- case 3: op = EOpConstructUMat4x3; break;
- case 4: op = EOpConstructUMat4x4; break;
- default: break; // some compilers want this
- }
- break;
- }
- } else {
- switch(type.getVectorSize()) {
- case 1: op = EOpConstructUint; break;
- case 2: op = EOpConstructUVec2; break;
- case 3: op = EOpConstructUVec3; break;
- case 4: op = EOpConstructUVec4; break;
- default: break; // some compilers want this
- }
- }
- break;
case EbtInt64:
switch(type.getVectorSize()) {
case 1: op = EOpConstructInt64; break;
@@ -2242,47 +2302,10 @@ TOperator TIntermediate::mapTypeToConstructorOp(const TType& type) const
default: break; // some compilers want this
}
break;
- case EbtBool:
- if (type.getMatrixCols()) {
- switch (type.getMatrixCols()) {
- case 2:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructBMat2x2; break;
- case 3: op = EOpConstructBMat2x3; break;
- case 4: op = EOpConstructBMat2x4; break;
- default: break; // some compilers want this
- }
- break;
- case 3:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructBMat3x2; break;
- case 3: op = EOpConstructBMat3x3; break;
- case 4: op = EOpConstructBMat3x4; break;
- default: break; // some compilers want this
- }
- break;
- case 4:
- switch (type.getMatrixRows()) {
- case 2: op = EOpConstructBMat4x2; break;
- case 3: op = EOpConstructBMat4x3; break;
- case 4: op = EOpConstructBMat4x4; break;
- default: break; // some compilers want this
- }
- break;
- }
- } else {
- switch(type.getVectorSize()) {
- case 1: op = EOpConstructBool; break;
- case 2: op = EOpConstructBVec2; break;
- case 3: op = EOpConstructBVec3; break;
- case 4: op = EOpConstructBVec4; break;
- default: break; // some compilers want this
- }
- }
- break;
case EbtReference:
op = EOpConstructReference;
break;
+#endif
default:
break;
}
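// Editor's note: the int/uint/bool constructor tables were hoisted out of the
// GLSLANG_WEB guard so web builds keep them, while double, 8/16/64-bit, and
// reference constructors remain full-build only. The mapping is shape-driven:
// e.g. EbtInt with vector size 3 yields EOpConstructIVec3, and a 3-column,
// 4-row int matrix yields EOpConstructIMat3x4.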
@@ -2741,6 +2764,7 @@ bool TIntermediate::postProcess(TIntermNode* root, EShLanguage /*language*/)
if (aggRoot && aggRoot->getOp() == EOpNull)
aggRoot->setOperator(EOpSequence);
+#ifndef GLSLANG_WEB
// Propagate 'noContraction' label in backward from 'precise' variables.
glslang::PropagateNoContraction(*this);
@@ -2751,6 +2775,7 @@ bool TIntermediate::postProcess(TIntermNode* root, EShLanguage /*language*/)
performTextureUpgradeAndSamplerRemovalTransformation(root);
break;
}
+#endif
return true;
}
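// Editor's note: with this hunk, web builds of postProcess run only the
// EOpNull -> EOpSequence fixup on the root aggregate; the noContraction
// ('precise') propagation and the texture-upgrade/sampler-removal transform
// are compiled out under GLSLANG_WEB.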
@@ -3788,217 +3813,54 @@ TIntermTyped* TIntermediate::promoteConstantUnion(TBasicType promoteTo, TIntermC
TConstUnionArray leftUnionArray(size);
for (int i=0; i < size; i++) {
- switch (promoteTo) {
- case EbtFloat:
- switch (node->getType().getBasicType()) {
- case EbtInt:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getIConst()));
- break;
- case EbtUint:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getUConst()));
- break;
- case EbtInt64:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getI64Const()));
- break;
- case EbtUint64:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getU64Const()));
- break;
- case EbtBool:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getBConst()));
- break;
- case EbtFloat:
- case EbtDouble:
- case EbtFloat16:
- leftUnionArray[i] = rightUnionArray[i];
- break;
- default:
- return node;
- }
- break;
- case EbtDouble:
- switch (node->getType().getBasicType()) {
- case EbtInt:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getIConst()));
- break;
- case EbtUint:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getUConst()));
- break;
- case EbtInt64:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getI64Const()));
- break;
- case EbtUint64:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getU64Const()));
- break;
- case EbtBool:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getBConst()));
- break;
- case EbtFloat:
- case EbtDouble:
- case EbtFloat16:
- leftUnionArray[i] = rightUnionArray[i];
- break;
- default:
- return node;
- }
- break;
- case EbtFloat16:
- switch (node->getType().getBasicType()) {
- case EbtInt:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getIConst()));
- break;
- case EbtUint:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getUConst()));
- break;
- case EbtInt64:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getI64Const()));
- break;
- case EbtUint64:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getU64Const()));
- break;
- case EbtBool:
- leftUnionArray[i].setDConst(static_cast<double>(rightUnionArray[i].getBConst()));
- break;
- case EbtFloat:
- case EbtDouble:
- case EbtFloat16:
- leftUnionArray[i] = rightUnionArray[i];
- break;
- default:
- return node;
- }
- break;
- case EbtInt:
- switch (node->getType().getBasicType()) {
- case EbtInt:
- leftUnionArray[i] = rightUnionArray[i];
- break;
- case EbtUint:
- leftUnionArray[i].setIConst(static_cast<int>(rightUnionArray[i].getUConst()));
- break;
- case EbtInt64:
- leftUnionArray[i].setIConst(static_cast<int>(rightUnionArray[i].getI64Const()));
- break;
- case EbtUint64:
- leftUnionArray[i].setIConst(static_cast<int>(rightUnionArray[i].getU64Const()));
- break;
- case EbtBool:
- leftUnionArray[i].setIConst(static_cast<int>(rightUnionArray[i].getBConst()));
- break;
- case EbtFloat:
- case EbtDouble:
- case EbtFloat16:
- leftUnionArray[i].setIConst(static_cast<int>(rightUnionArray[i].getDConst()));
- break;
- default:
- return node;
- }
- break;
- case EbtUint:
- switch (node->getType().getBasicType()) {
- case EbtInt:
- leftUnionArray[i].setUConst(static_cast<unsigned int>(rightUnionArray[i].getIConst()));
- break;
- case EbtUint:
- leftUnionArray[i] = rightUnionArray[i];
- break;
- case EbtInt64:
- leftUnionArray[i].setUConst(static_cast<unsigned int>(rightUnionArray[i].getI64Const()));
- break;
- case EbtUint64:
- leftUnionArray[i].setUConst(static_cast<unsigned int>(rightUnionArray[i].getU64Const()));
- break;
- case EbtBool:
- leftUnionArray[i].setUConst(static_cast<unsigned int>(rightUnionArray[i].getBConst()));
- break;
- case EbtFloat:
- case EbtDouble:
- case EbtFloat16:
- leftUnionArray[i].setUConst(static_cast<unsigned int>(rightUnionArray[i].getDConst()));
- break;
- default:
- return node;
- }
- break;
- case EbtBool:
- switch (node->getType().getBasicType()) {
- case EbtInt:
- leftUnionArray[i].setBConst(rightUnionArray[i].getIConst() != 0);
- break;
- case EbtUint:
- leftUnionArray[i].setBConst(rightUnionArray[i].getUConst() != 0);
- break;
- case EbtInt64:
- leftUnionArray[i].setBConst(rightUnionArray[i].getI64Const() != 0);
- break;
- case EbtUint64:
- leftUnionArray[i].setBConst(rightUnionArray[i].getU64Const() != 0);
- break;
- case EbtBool:
- leftUnionArray[i] = rightUnionArray[i];
- break;
- case EbtFloat:
- case EbtDouble:
- case EbtFloat16:
- leftUnionArray[i].setBConst(rightUnionArray[i].getDConst() != 0.0);
- break;
- default:
- return node;
- }
- break;
- case EbtInt64:
- switch (node->getType().getBasicType()) {
- case EbtInt:
- leftUnionArray[i].setI64Const(static_cast<long long>(rightUnionArray[i].getIConst()));
- break;
- case EbtUint:
- leftUnionArray[i].setI64Const(static_cast<long long>(rightUnionArray[i].getUConst()));
- break;
- case EbtInt64:
- leftUnionArray[i] = rightUnionArray[i];
- break;
- case EbtUint64:
- leftUnionArray[i].setI64Const(static_cast<long long>(rightUnionArray[i].getU64Const()));
- break;
- case EbtBool:
- leftUnionArray[i].setI64Const(static_cast<long long>(rightUnionArray[i].getBConst()));
- break;
- case EbtFloat:
- case EbtDouble:
- case EbtFloat16:
- leftUnionArray[i].setI64Const(static_cast<long long>(rightUnionArray[i].getDConst()));
- break;
- default:
- return node;
- }
- break;
- case EbtUint64:
- switch (node->getType().getBasicType()) {
- case EbtInt:
- leftUnionArray[i].setU64Const(static_cast<unsigned long long>(rightUnionArray[i].getIConst()));
- break;
- case EbtUint:
- leftUnionArray[i].setU64Const(static_cast<unsigned long long>(rightUnionArray[i].getUConst()));
- break;
- case EbtInt64:
- leftUnionArray[i].setU64Const(static_cast<unsigned long long>(rightUnionArray[i].getI64Const()));
- break;
- case EbtUint64:
- leftUnionArray[i] = rightUnionArray[i];
- break;
- case EbtBool:
- leftUnionArray[i].setU64Const(static_cast<unsigned long long>(rightUnionArray[i].getBConst()));
- break;
- case EbtFloat:
- case EbtDouble:
- case EbtFloat16:
- leftUnionArray[i].setU64Const(static_cast<unsigned long long>(rightUnionArray[i].getDConst()));
- break;
- default:
- return node;
- }
- break;
- default:
- return node;
+
+#define PROMOTE(Set, CType, Get) leftUnionArray[i].Set(static_cast<CType>(rightUnionArray[i].Get()))
+#define PROMOTE_TO_BOOL(Get) leftUnionArray[i].setBConst(rightUnionArray[i].Get() != 0)
+
+#ifdef GLSLANG_WEB
+#define TO_ALL(Get) \
+ switch (promoteTo) { \
+ case EbtFloat: PROMOTE(setDConst, double, Get); break; \
+ case EbtInt: PROMOTE(setIConst, int, Get); break; \
+ case EbtUint: PROMOTE(setUConst, unsigned int, Get); break; \
+ case EbtBool: PROMOTE_TO_BOOL(Get); break; \
+ default: return node; \
+ }
+#else
+#define TO_ALL(Get) \
+ switch (promoteTo) { \
+ case EbtFloat16: PROMOTE(setDConst, double, Get); break; \
+ case EbtFloat: PROMOTE(setDConst, double, Get); break; \
+ case EbtDouble: PROMOTE(setDConst, double, Get); break; \
+ case EbtInt8: PROMOTE(setI8Const, char, Get); break; \
+ case EbtInt16: PROMOTE(setI16Const, short, Get); break; \
+ case EbtInt: PROMOTE(setIConst, int, Get); break; \
+ case EbtInt64: PROMOTE(setI64Const, long long, Get); break; \
+ case EbtUint8: PROMOTE(setU8Const, unsigned char, Get); break; \
+ case EbtUint16: PROMOTE(setU16Const, unsigned short, Get); break; \
+ case EbtUint: PROMOTE(setUConst, unsigned int, Get); break; \
+ case EbtUint64: PROMOTE(setU64Const, unsigned long long, Get); break; \
+ case EbtBool: PROMOTE_TO_BOOL(Get); break; \
+ default: return node; \
+ }
+#endif
+
+ switch (node->getType().getBasicType()) {
+ case EbtFloat: TO_ALL(getDConst); break;
+ case EbtInt: TO_ALL(getIConst); break;
+ case EbtUint: TO_ALL(getUConst); break;
+ case EbtBool: TO_ALL(getBConst); break;
+#ifndef GLSLANG_WEB
+ case EbtFloat16: TO_ALL(getDConst); break;
+ case EbtDouble: TO_ALL(getDConst); break;
+ case EbtInt8: TO_ALL(getI8Const); break;
+ case EbtInt16: TO_ALL(getI16Const); break;
+ case EbtInt64: TO_ALL(getI64Const); break;
+ case EbtUint8: TO_ALL(getU8Const); break;
+ case EbtUint16: TO_ALL(getU16Const); break;
+ case EbtUint64: TO_ALL(getU64Const); break;
+#endif
+ default: return node;
}
}
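// Editor's note: the PROMOTE/TO_ALL macros above replace what used to be a
// hand-written source-type x destination-type matrix of switch cases. A
// sketch of one expansion, for a source constant of basic type EbtInt being
// promoted to EbtFloat:
//
//   TO_ALL(getIConst)
//   //   -> case EbtFloat: PROMOTE(setDConst, double, getIConst); break;
//   //   -> leftUnionArray[i].setDConst(
//   //          static_cast<double>(rightUnionArray[i].getIConst()));
//
// Every promotion widens through the source getter and stores via the
// destination setter; bool destinations go through PROMOTE_TO_BOOL, which
// tests the fetched value against zero.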
@@ -4027,7 +3889,7 @@ bool TIntermediate::specConstantPropagates(const TIntermTyped& node1, const TInt
struct TextureUpgradeAndSamplerRemovalTransform : public TIntermTraverser {
void visitSymbol(TIntermSymbol* symbol) override {
if (symbol->getBasicType() == EbtSampler && symbol->getType().getSampler().isTexture()) {
- symbol->getWritableType().getSampler().combined = true;
+ symbol->getWritableType().getSampler().setCombined(true);
}
}
bool visitAggregate(TVisit, TIntermAggregate* ag) override {
diff --git a/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp b/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp
index c9ddaeadb0..282ecca0e0 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/ParseContextBase.cpp
@@ -67,6 +67,8 @@ void TParseContextBase::outputMessage(const TSourceLoc& loc, const char* szReaso
}
}
+#if !defined(GLSLANG_WEB) || defined(GLSLANG_WEB_DEVEL)
+
void C_DECL TParseContextBase::error(const TSourceLoc& loc, const char* szReason, const char* szToken,
const char* szExtraInfoFormat, ...)
{
@@ -113,6 +115,8 @@ void C_DECL TParseContextBase::ppWarn(const TSourceLoc& loc, const char* szReaso
va_end(args);
}
+#endif
+
//
// Both test and if necessary, spit out an error, to see if the node is really
// an l-value that can be operated on this way.
@@ -149,15 +153,13 @@ bool TParseContextBase::lValueErrorCheck(const TSourceLoc& loc, const char* op,
case EvqConst: message = "can't modify a const"; break;
case EvqConstReadOnly: message = "can't modify a const"; break;
case EvqUniform: message = "can't modify a uniform"; break;
+#ifndef GLSLANG_WEB
case EvqBuffer:
- if (node->getQualifier().readonly)
+ if (node->getQualifier().isReadOnly())
message = "can't modify a readonly buffer";
-#ifdef NV_EXTENSIONS
- if (node->getQualifier().layoutShaderRecordNV)
+ if (node->getQualifier().isShaderRecordNV())
message = "can't modify a shaderrecordnv qualified buffer";
-#endif
break;
-#ifdef NV_EXTENSIONS
case EvqHitAttrNV:
if (language != EShLangIntersectNV)
message = "cannot modify hitAttributeNV in this stage";
@@ -172,13 +174,13 @@ bool TParseContextBase::lValueErrorCheck(const TSourceLoc& loc, const char* op,
case EbtSampler:
message = "can't modify a sampler";
break;
- case EbtAtomicUint:
- message = "can't modify an atomic_uint";
- break;
case EbtVoid:
message = "can't modify void";
break;
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
+ case EbtAtomicUint:
+ message = "can't modify an atomic_uint";
+ break;
case EbtAccStructNV:
message = "can't modify accelerationStructureNV";
break;
@@ -234,7 +236,7 @@ void TParseContextBase::rValueErrorCheck(const TSourceLoc& loc, const char* op,
}
TIntermSymbol* symNode = node->getAsSymbolNode();
- if (symNode && symNode->getQualifier().writeonly)
+ if (symNode && symNode->getQualifier().isWriteOnly())
error(loc, "can't read from writeonly object: ", op, symNode->getName().c_str());
}
@@ -254,11 +256,17 @@ void TParseContextBase::trackLinkage(TSymbol& symbol)
// Give an error if not.
void TParseContextBase::checkIndex(const TSourceLoc& loc, const TType& type, int& index)
{
+ const auto sizeIsSpecializationExpression = [&type]() {
+ return type.containsSpecializationSize() &&
+ type.getArraySizes()->getOuterNode() != nullptr &&
+ type.getArraySizes()->getOuterNode()->getAsSymbolNode() == nullptr; };
+
if (index < 0) {
error(loc, "", "[", "index out of range '%d'", index);
index = 0;
} else if (type.isArray()) {
- if (type.isSizedArray() && index >= type.getOuterArraySize()) {
+ if (type.isSizedArray() && !sizeIsSpecializationExpression() &&
+ index >= type.getOuterArraySize()) {
error(loc, "", "[", "array index out of range '%d'", index);
index = type.getOuterArraySize() - 1;
}
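// Editor's note: sizeIsSpecializationExpression() above exempts arrays whose
// outer size is a specialization-constant *expression* (rather than a plain
// spec-constant symbol) from this compile-time range check, since the actual
// size is only fixed at pipeline creation. A hypothetical example:
//   layout(constant_id = 0) const int N = 4;
//   float a[N * 2];   // default size 8, but N may be respecialized
// so indexing into a[] cannot be range-checked against 8 here.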
@@ -568,6 +576,7 @@ void TParseContextBase::parseSwizzleSelector(const TSourceLoc& loc, const TStrin
selector.push_back(0);
}
+#ifdef ENABLE_HLSL
//
// Make the passed-in variable information become a member of the
// global uniform block. If this doesn't exist yet, make it.
@@ -612,6 +621,7 @@ void TParseContextBase::growGlobalUniformBlock(const TSourceLoc& loc, TType& mem
++firstNewMember;
}
+#endif
void TParseContextBase::finish()
{
diff --git a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp
index 6a8d379b09..a2224e1609 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.cpp
@@ -2,7 +2,7 @@
// Copyright (C) 2002-2005 3Dlabs Inc. Ltd.
// Copyright (C) 2012-2015 LunarG, Inc.
// Copyright (C) 2015-2018 Google, Inc.
-// Copyright (C) 2017 ARM Limited.
+// Copyright (C) 2017, 2019 ARM Limited.
//
// All rights reserved.
//
@@ -56,13 +56,16 @@ TParseContext::TParseContext(TSymbolTable& symbolTable, TIntermediate& interm, b
infoSink, forwardCompatible, messages, entryPoint),
inMain(false),
blockName(nullptr),
- limits(resources.limits),
+ limits(resources.limits)
+#ifndef GLSLANG_WEB
+ ,
atomicUintOffsets(nullptr), anyIndexLimits(false)
+#endif
{
// decide whether precision qualifiers should be ignored or respected
- if (profile == EEsProfile || spvVersion.vulkan > 0) {
+ if (isEsProfile() || spvVersion.vulkan > 0) {
precisionManager.respectPrecisionQualifiers();
- if (! parsingBuiltins && language == EShLangFragment && profile != EEsProfile && spvVersion.vulkan > 0)
+ if (! parsingBuiltins && language == EShLangFragment && !isEsProfile() && spvVersion.vulkan > 0)
precisionManager.warnAboutDefaults();
}
@@ -83,6 +86,7 @@ TParseContext::TParseContext(TSymbolTable& symbolTable, TIntermediate& interm, b
globalInputDefaults.clear();
globalOutputDefaults.clear();
+#ifndef GLSLANG_WEB
// "Shaders in the transform
// feedback capturing mode have an initial global default of
// layout(xfb_buffer = 0) out;"
@@ -94,6 +98,7 @@ TParseContext::TParseContext(TSymbolTable& symbolTable, TIntermediate& interm, b
if (language == EShLangGeometry)
globalOutputDefaults.layoutStream = 0;
+#endif
if (entryPoint != nullptr && entryPoint->size() > 0 && *entryPoint != "main")
infoSink.info.message(EPrefixError, "Source entry point must be \"main\"");
@@ -101,7 +106,9 @@ TParseContext::TParseContext(TSymbolTable& symbolTable, TIntermediate& interm, b
TParseContext::~TParseContext()
{
+#ifndef GLSLANG_WEB
delete [] atomicUintOffsets;
+#endif
}
// Set up all default precisions as needed by the current environment.
@@ -121,7 +128,7 @@ void TParseContext::setPrecisionDefaults()
// replace with real precision defaults for those that have them
if (obeyPrecisionQualifiers()) {
- if (profile == EEsProfile) {
+ if (isEsProfile()) {
// Most don't have defaults, a few default to lowp.
TSampler sampler;
sampler.set(EbtFloat, Esd2D);
@@ -129,7 +136,7 @@ void TParseContext::setPrecisionDefaults()
sampler.set(EbtFloat, EsdCube);
defaultSamplerPrecision[computeSamplerTypeIndex(sampler)] = EpqLow;
sampler.set(EbtFloat, Esd2D);
- sampler.external = true;
+ sampler.setExternal(true);
defaultSamplerPrecision[computeSamplerTypeIndex(sampler)] = EpqLow;
}
@@ -138,7 +145,7 @@ void TParseContext::setPrecisionDefaults()
// is used to resolve the precision from the supplied arguments/operands instead.
// So, we don't actually want to replace EpqNone with a default precision for built-ins.
if (! parsingBuiltins) {
- if (profile == EEsProfile && language == EShLangFragment) {
+ if (isEsProfile() && language == EShLangFragment) {
defaultPrecision[EbtInt] = EpqMedium;
defaultPrecision[EbtUint] = EpqMedium;
} else {
@@ -147,7 +154,7 @@ void TParseContext::setPrecisionDefaults()
defaultPrecision[EbtFloat] = EpqHigh;
}
- if (profile != EEsProfile) {
+ if (!isEsProfile()) {
// Non-ES profile
// All sampler precisions default to highp.
for (int type = 0; type < maxSamplerIndex; ++type)
@@ -163,7 +170,9 @@ void TParseContext::setPrecisionDefaults()
void TParseContext::setLimits(const TBuiltInResource& r)
{
resources = r;
+ intermediate.setLimits(r);
+#ifndef GLSLANG_WEB
anyIndexLimits = ! limits.generalAttributeMatrixVectorIndexing ||
! limits.generalConstantMatrixVectorIndexing ||
! limits.generalSamplerIndexing ||
@@ -171,7 +180,6 @@ void TParseContext::setLimits(const TBuiltInResource& r)
! limits.generalVariableIndexing ||
! limits.generalVaryingIndexing;
- intermediate.setLimits(resources);
// "Each binding point tracks its own current default offset for
// inheritance of subsequent variables using the same binding. The initial state of compilation is that all
@@ -179,6 +187,7 @@ void TParseContext::setLimits(const TBuiltInResource& r)
atomicUintOffsets = new int[resources.maxAtomicCounterBindings];
for (int b = 0; b < resources.maxAtomicCounterBindings; ++b)
atomicUintOffsets[b] = 0;
+#endif
}
//
@@ -213,6 +222,7 @@ void TParseContext::parserError(const char* s)
void TParseContext::handlePragma(const TSourceLoc& loc, const TVector<TString>& tokens)
{
+#ifndef GLSLANG_WEB
if (pragmaCallback)
pragmaCallback(loc.line, tokens);
@@ -285,6 +295,7 @@ void TParseContext::handlePragma(const TSourceLoc& loc, const TVector<TString>&
warn(loc, "not implemented", "#pragma once", "");
} else if (tokens[0].compare("glslang_binary_double_output") == 0)
intermediate.setBinaryDoubleOutput();
+#endif
}
//
@@ -298,6 +309,7 @@ TIntermTyped* TParseContext::handleVariable(const TSourceLoc& loc, TSymbol* symb
if (symbol && symbol->getNumExtensions())
requireExtensions(loc, symbol->getNumExtensions(), symbol->getExtensions(), symbol->getName().c_str());
+#ifndef GLSLANG_WEB
if (symbol && symbol->isReadOnly()) {
// All shared things containing an unsized array must be copied up
// on first use, so that all future references will share its array structure,
@@ -307,11 +319,17 @@ TIntermTyped* TParseContext::handleVariable(const TSourceLoc& loc, TSymbol* symb
// If this is a variable or a block, check it and all it contains, but if this
// is a member of an anonymous block, check the whole block, as the whole block
// will need to be copied up if it contains an unsized array.
- if (symbol->getType().containsUnsizedArray() ||
- (symbol->getAsAnonMember() &&
- symbol->getAsAnonMember()->getAnonContainer().getType().containsUnsizedArray()))
- makeEditable(symbol);
+ //
+ // This check is being done before the block-name check further down, so guard
+ // for that too.
+ if (!symbol->getType().isUnusableName()) {
+ if (symbol->getType().containsUnsizedArray() ||
+ (symbol->getAsAnonMember() &&
+ symbol->getAsAnonMember()->getAnonContainer().getType().containsUnsizedArray()))
+ makeEditable(symbol);
+ }
}
+#endif
const TVariable* variable;
const TAnonMember* anon = symbol ? symbol->getAsAnonMember() : nullptr;
@@ -334,8 +352,7 @@ TIntermTyped* TParseContext::handleVariable(const TSourceLoc& loc, TSymbol* symb
// See if it was a variable.
variable = symbol ? symbol->getAsVariable() : nullptr;
if (variable) {
- if ((variable->getType().getBasicType() == EbtBlock ||
- variable->getType().getBasicType() == EbtStruct) && variable->getType().getStruct() == nullptr) {
+ if (variable->getType().isUnusableName()) {
error(loc, "cannot be used (maybe an instance name is needed)", string->c_str(), "");
variable = nullptr;
}
@@ -357,7 +374,7 @@ TIntermTyped* TParseContext::handleVariable(const TSourceLoc& loc, TSymbol* symb
if (variable->getType().getQualifier().isIo())
intermediate.addIoAccessed(*string);
- if (variable->getType().getBasicType() == EbtReference &&
+ if (variable->getType().isReference() &&
variable->getType().getQualifier().bufferReferenceNeedsVulkanMemoryModel()) {
intermediate.setUseVulkanMemoryModel();
}
@@ -378,7 +395,7 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn
variableCheck(base);
if (! base->isArray() && ! base->isMatrix() && ! base->isVector() && ! base->getType().isCoopMat() &&
- base->getBasicType() != EbtReference) {
+ ! base->isReference()) {
if (base->getAsSymbolNode())
error(loc, " left of '[' is not of type array, matrix, or vector ", base->getAsSymbolNode()->getName().c_str(), "");
else
@@ -389,7 +406,7 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn
}
if (!base->isArray() && base->isVector()) {
- if (base->getType().containsBasicType(EbtFloat16))
+ if (base->getType().contains16BitFloat())
requireFloat16Arithmetic(loc, "[", "does not operate on types containing float16");
if (base->getType().contains16BitInt())
requireInt16Arithmetic(loc, "[", "does not operate on types containing (u)int16");
@@ -407,23 +424,24 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn
// at least one of base and index is not a front-end constant variable...
TIntermTyped* result = nullptr;
- if (base->getBasicType() == EbtReference && ! base->isArray()) {
+#ifndef GLSLANG_WEB
+ if (base->isReference() && ! base->isArray()) {
requireExtensions(loc, 1, &E_GL_EXT_buffer_reference2, "buffer reference indexing");
result = intermediate.addBinaryMath(EOpAdd, base, index, loc);
result->setType(base->getType());
return result;
}
+ if (base->getAsSymbolNode() && isIoResizeArray(base->getType()))
+ handleIoResizeArrayAccess(loc, base);
+#endif
if (index->getQualifier().isFrontEndConstant())
checkIndex(loc, base->getType(), indexValue);
- if (base->getAsSymbolNode() && isIoResizeArray(base->getType()))
- handleIoResizeArrayAccess(loc, base);
-
if (index->getQualifier().isFrontEndConstant()) {
+#ifndef GLSLANG_WEB
if (base->getType().isUnsizedArray()) {
base->getWritableType().updateImplicitArraySize(indexValue + 1);
-#ifdef NV_EXTENSIONS
// For 2D per-view builtin arrays, update the inner dimension size in parent type
if (base->getQualifier().isPerView() && base->getQualifier().builtIn != EbvNone) {
TIntermBinary* binaryNode = base->getAsBinaryNode();
@@ -434,11 +452,12 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn
arraySizes.setDimSize(1, std::max(arraySizes.getDimSize(1), indexValue + 1));
}
}
-#endif
} else
+#endif
checkIndex(loc, base->getType(), indexValue);
result = intermediate.addIndex(EOpIndexDirect, base, index, loc);
} else {
+#ifndef GLSLANG_WEB
if (base->getType().isUnsizedArray()) {
// we have a variable index into an unsized array, which is okay,
// depending on the situation
@@ -450,6 +469,7 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn
}
base->getWritableType().setArrayVariablyIndexed();
}
+#endif
if (base->getBasicType() == EbtBlock) {
if (base->getQualifier().storage == EvqBuffer)
requireProfile(base->getLoc(), ~EEsProfile, "variable indexing buffer block array");
@@ -457,7 +477,7 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn
profileRequires(base->getLoc(), EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5,
"variable indexing uniform block array");
else {
- // input/output blocks either don't exist or can be variable indexed
+ // input/output blocks either don't exist or can't be variably indexed
}
} else if (language == EShLangFragment && base->getQualifier().isPipeOutput())
requireProfile(base->getLoc(), ~EEsProfile, "variable indexing fragment shader output array");
@@ -471,8 +491,8 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn
result = intermediate.addIndex(EOpIndexIndirect, base, index, loc);
}
- // Insert valid dereferenced result
- TType newType(base->getType(), 0); // dereferenced type
+ // Insert valid dereferenced result type
+ TType newType(base->getType(), 0);
if (base->getType().getQualifier().isConstant() && index->getQualifier().isConstant()) {
newType.getQualifier().storage = EvqConst;
// If base or index is a specialization constant, the result should also be a specialization constant.
@@ -480,20 +500,27 @@ TIntermTyped* TParseContext::handleBracketDereference(const TSourceLoc& loc, TIn
newType.getQualifier().makeSpecConstant();
}
} else {
- newType.getQualifier().makePartialTemporary();
+ newType.getQualifier().storage = EvqTemporary;
+ newType.getQualifier().specConstant = false;
}
result->setType(newType);
+#ifndef GLSLANG_WEB
+ inheritMemoryQualifiers(base->getQualifier(), result->getWritableType().getQualifier());
+
// Propagate nonuniform
if (base->getQualifier().isNonUniform() || index->getQualifier().isNonUniform())
result->getWritableType().getQualifier().nonUniform = true;
if (anyIndexLimits)
handleIndexLimits(loc, base, index);
+#endif
return result;
}
+#ifndef GLSLANG_WEB
+
// for ES 2.0 (version 100) limitations for almost all index operations except vertex-shader uniforms
void TParseContext::handleIndexLimits(const TSourceLoc& /*loc*/, TIntermTyped* base, TIntermTyped* index)
{
@@ -530,14 +557,12 @@ bool TParseContext::isIoResizeArray(const TType& type) const
{
return type.isArray() &&
((language == EShLangGeometry && type.getQualifier().storage == EvqVaryingIn) ||
- (language == EShLangTessControl && type.getQualifier().storage == EvqVaryingOut && ! type.getQualifier().patch)
-#ifdef NV_EXTENSIONS
- ||
- (language == EShLangFragment && type.getQualifier().storage == EvqVaryingIn && type.getQualifier().pervertexNV) ||
- (language == EShLangMeshNV && type.getQualifier().storage == EvqVaryingOut && !type.getQualifier().perTaskNV)
-
-#endif
- );
+ (language == EShLangTessControl && type.getQualifier().storage == EvqVaryingOut &&
+ ! type.getQualifier().patch) ||
+ (language == EShLangFragment && type.getQualifier().storage == EvqVaryingIn &&
+ type.getQualifier().pervertexNV) ||
+ (language == EShLangMeshNV && type.getQualifier().storage == EvqVaryingOut &&
+ !type.getQualifier().perTaskNV));
}
// If an array is not isIoResizeArray() but is an io array, make sure it has the right size
@@ -566,11 +591,7 @@ void TParseContext::fixIoArraySize(const TSourceLoc& loc, TType& type)
void TParseContext::ioArrayCheck(const TSourceLoc& loc, const TType& type, const TString& identifier)
{
if (! type.isArray() && ! symbolTable.atBuiltInLevel()) {
- if (type.getQualifier().isArrayedIo(language)
-#ifdef NV_EXTENSIONS
- && !type.getQualifier().layoutPassthrough
-#endif
- )
+ if (type.getQualifier().isArrayedIo(language) && !type.getQualifier().layoutPassthrough)
error(loc, "type must be an array:", type.getStorageQualifierString(), identifier.c_str());
}
}
@@ -617,12 +638,7 @@ void TParseContext::checkIoArraysConsistency(const TSourceLoc &loc, bool tailOnl
// As I/O array sizes don't change, fetch requiredSize only once,
// except for mesh shaders which could have different I/O array sizes based on type qualifiers.
- if (firstIteration
-#ifdef NV_EXTENSIONS
- || (language == EShLangMeshNV)
-#endif
- )
- {
+ if (firstIteration || (language == EShLangMeshNV)) {
requiredSize = getIoArrayImplicitSize(type.getQualifier(), &featureString);
if (requiredSize == 0)
break;
@@ -647,14 +663,11 @@ int TParseContext::getIoArrayImplicitSize(const TQualifier &qualifier, TString *
else if (language == EShLangTessControl) {
expectedSize = maxVertices;
str = "vertices";
- }
-#ifdef NV_EXTENSIONS
- else if (language == EShLangFragment) {
+ } else if (language == EShLangFragment) {
// Number of vertices for Fragment shader is always three.
expectedSize = 3;
str = "vertices";
- }
- else if (language == EShLangMeshNV) {
+ } else if (language == EShLangMeshNV) {
unsigned int maxPrimitives =
intermediate.getPrimitives() != TQualifier::layoutNotSet ? intermediate.getPrimitives() : 0;
if (qualifier.builtIn == EbvPrimitiveIndicesNV) {
@@ -671,7 +684,6 @@ int TParseContext::getIoArrayImplicitSize(const TQualifier &qualifier, TString *
str = "max_vertices";
}
}
-#endif
if (featureString)
*featureString = str;
return expectedSize;
@@ -686,19 +698,19 @@ void TParseContext::checkIoArrayConsistency(const TSourceLoc& loc, int requiredS
error(loc, "inconsistent input primitive for array size of", feature, name.c_str());
else if (language == EShLangTessControl)
error(loc, "inconsistent output number of vertices for array size of", feature, name.c_str());
-#ifdef NV_EXTENSIONS
else if (language == EShLangFragment) {
if (type.getOuterArraySize() > requiredSize)
error(loc, " cannot be greater than 3 for pervertexNV", feature, name.c_str());
}
else if (language == EShLangMeshNV)
error(loc, "inconsistent output array size of", feature, name.c_str());
-#endif
else
assert(0);
}
}
+#endif // GLSLANG_WEB
+
// Handle seeing a binary node with a math operation.
// Returns nullptr if not semantically allowed.
TIntermTyped* TParseContext::handleBinaryMath(const TSourceLoc& loc, const char* str, TOperator op, TIntermTyped* left, TIntermTyped* right)
@@ -721,7 +733,7 @@ TIntermTyped* TParseContext::handleBinaryMath(const TSourceLoc& loc, const char*
break;
}
- if (((left->getType().containsBasicType(EbtFloat16) || right->getType().containsBasicType(EbtFloat16)) && !float16Arithmetic()) ||
+ if (((left->getType().contains16BitFloat() || right->getType().contains16BitFloat()) && !float16Arithmetic()) ||
((left->getType().contains16BitInt() || right->getType().contains16BitInt()) && !int16Arithmetic()) ||
((left->getType().contains8BitInt() || right->getType().contains8BitInt()) && !int8Arithmetic())) {
allowed = false;
@@ -743,14 +755,13 @@ TIntermTyped* TParseContext::handleUnaryMath(const TSourceLoc& loc, const char*
rValueErrorCheck(loc, str, childNode);
bool allowed = true;
- if ((childNode->getType().containsBasicType(EbtFloat16) && !float16Arithmetic()) ||
+ if ((childNode->getType().contains16BitFloat() && !float16Arithmetic()) ||
(childNode->getType().contains16BitInt() && !int16Arithmetic()) ||
(childNode->getType().contains8BitInt() && !int8Arithmetic())) {
allowed = false;
}
TIntermTyped* result = nullptr;
-
if (allowed)
result = intermediate.addUnaryMath(op, childNode, loc);
@@ -819,7 +830,7 @@ TIntermTyped* TParseContext::handleDotDereference(const TSourceLoc& loc, TInterm
TSwizzleSelectors<TVectorSelector> selectors;
parseSwizzleSelector(loc, field, base->getVectorSize(), selectors);
- if (base->isVector() && selectors.size() != 1 && base->getType().containsBasicType(EbtFloat16))
+ if (base->isVector() && selectors.size() != 1 && base->getType().contains16BitFloat())
requireFloat16Arithmetic(loc, ".", "can't swizzle types containing float16");
if (base->isVector() && selectors.size() != 1 && base->getType().contains16BitInt())
requireInt16Arithmetic(loc, ".", "can't swizzle types containing (u)int16");
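// Editor's note (illustrative): e.g. "f16vec4 v; ... v.xy" is a multi-component
// swizzle and so requires float16 arithmetic support under the checks above,
// while the single selection "v.x" passes.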
@@ -854,12 +865,10 @@ TIntermTyped* TParseContext::handleDotDereference(const TSourceLoc& loc, TInterm
if (base->getType().getQualifier().isSpecConstant())
result->getWritableType().getQualifier().makeSpecConstant();
}
- } else if (base->getBasicType() == EbtStruct ||
- base->getBasicType() == EbtBlock ||
- base->getBasicType() == EbtReference) {
- const TTypeList* fields = base->getBasicType() == EbtReference ?
- base->getType().getReferentType()->getStruct() :
- base->getType().getStruct();
+ } else if (base->isStruct() || base->isReference()) {
+ const TTypeList* fields = base->isReference() ?
+ base->getType().getReferentType()->getStruct() :
+ base->getType().getStruct();
bool fieldFound = false;
int member;
for (member = 0; member < (int)fields->size(); ++member) {
@@ -879,14 +888,15 @@ TIntermTyped* TParseContext::handleDotDereference(const TSourceLoc& loc, TInterm
if ((*fields)[member].type->getQualifier().isIo())
intermediate.addIoAccessed(field);
}
+ inheritMemoryQualifiers(base->getQualifier(), result->getWritableType().getQualifier());
} else
error(loc, "no such field in structure", field.c_str(), "");
} else
error(loc, "does not apply to this type:", field.c_str(), base->getType().getCompleteString().c_str());
// Propagate noContraction up the dereference chain
- if (base->getQualifier().noContraction)
- result->getWritableType().getQualifier().noContraction = true;
+ if (base->getQualifier().isNoContraction())
+ result->getWritableType().getQualifier().setNoContraction();
// Propagate nonuniform
if (base->getQualifier().isNonUniform())
@@ -1126,7 +1136,7 @@ TIntermTyped* TParseContext::handleFunctionCall(const TSourceLoc& loc, TFunction
if (builtIn && fnCandidate->getNumExtensions())
requireExtensions(loc, fnCandidate->getNumExtensions(), fnCandidate->getExtensions(), fnCandidate->getName().c_str());
- if (builtIn && fnCandidate->getType().containsBasicType(EbtFloat16))
+ if (builtIn && fnCandidate->getType().contains16BitFloat())
requireFloat16Arithmetic(loc, "built-in function", "float16 types can only be in uniform block or buffer storage");
if (builtIn && fnCandidate->getType().contains16BitInt())
requireInt16Arithmetic(loc, "built-in function", "(u)int16 types can only be in uniform block or buffer storage");
@@ -1146,9 +1156,11 @@ TIntermTyped* TParseContext::handleFunctionCall(const TSourceLoc& loc, TFunction
if (lValueErrorCheck(arguments->getLoc(), "assign", arg->getAsTyped()))
error(arguments->getLoc(), "Non-L-value cannot be passed for 'out' or 'inout' parameters.", "out", "");
}
- TQualifier& argQualifier = arg->getAsTyped()->getQualifier();
- if (argQualifier.isMemory()) {
+ const TType& argType = arg->getAsTyped()->getType();
+ const TQualifier& argQualifier = argType.getQualifier();
+ if (argQualifier.isMemory() && (argType.containsOpaque() || argType.isReference())) {
const char* message = "argument cannot drop memory qualifier when passed to formal parameter";
+#ifndef GLSLANG_WEB
if (argQualifier.volatil && ! formalQualifier.volatil)
error(arguments->getLoc(), message, "volatile", "");
if (argQualifier.coherent && ! (formalQualifier.devicecoherent || formalQualifier.coherent))
@@ -1165,18 +1177,19 @@ TIntermTyped* TParseContext::handleFunctionCall(const TSourceLoc& loc, TFunction
error(arguments->getLoc(), message, "readonly", "");
if (argQualifier.writeonly && ! formalQualifier.writeonly)
error(arguments->getLoc(), message, "writeonly", "");
- if (!builtIn && argQualifier.restrict && ! formalQualifier.restrict)
- error(arguments->getLoc(), message, "restrict", "");
+ // Don't check 'restrict'; it is different from the rest:
+ // "...but only restrict can be taken away from a calling argument, by a formal parameter that
+ // lacks the restrict qualifier..."
+#endif
}
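// Editor's note (illustrative): e.g. passing a buffer member declared "coherent"
// to a formal parameter lacking "coherent" trips the errors above, while dropping
// "restrict" at a call boundary stays legal per the quoted rule.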
- if (!builtIn && argQualifier.layoutFormat != formalQualifier.layoutFormat) {
+ if (!builtIn && argQualifier.getFormat() != formalQualifier.getFormat()) {
// we have mismatched formats, which should only be allowed if writeonly
// and at least one format is unknown
- if (!formalQualifier.writeonly || (formalQualifier.layoutFormat != ElfNone &&
- argQualifier.layoutFormat != ElfNone))
+ if (!formalQualifier.isWriteOnly() || (formalQualifier.getFormat() != ElfNone &&
+ argQualifier.getFormat() != ElfNone))
error(arguments->getLoc(), "image formats must match", "format", "");
}
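// Editor's note (illustrative): e.g. a "layout(rgba8) uniform image2D" argument
// may be passed to a writeonly formal parameter with no declared format, but not
// to one declared with a different format such as rgba32f.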
-
- if (builtIn && arg->getAsTyped()->getType().containsBasicType(EbtFloat16))
+ if (builtIn && arg->getAsTyped()->getType().contains16BitFloat())
requireFloat16Arithmetic(arguments->getLoc(), "built-in function", "float16 types can only be in uniform block or buffer storage");
if (builtIn && arg->getAsTyped()->getType().contains16BitInt())
requireInt16Arithmetic(arguments->getLoc(), "built-in function", "(u)int16 types can only be in uniform block or buffer storage");
@@ -1216,9 +1229,11 @@ TIntermTyped* TParseContext::handleFunctionCall(const TSourceLoc& loc, TFunction
intermediate.addToCallGraph(infoSink, currentCaller, fnCandidate->getMangledName());
}
+#ifndef GLSLANG_WEB
if (builtIn)
nonOpBuiltInCheck(loc, *fnCandidate, *call);
else
+#endif
userFunctionCallCheck(loc, *call);
}
@@ -1347,13 +1362,9 @@ void TParseContext::computeBuiltinPrecisions(TIntermTyped& node, const TFunction
operationPrecision = std::max(operationPrecision, function[arg].type->getQualifier().precision);
}
// compute the result precision
-#ifdef AMD_EXTENSIONS
if (agg->isSampling() ||
agg->getOp() == EOpImageLoad || agg->getOp() == EOpImageStore ||
agg->getOp() == EOpImageLoadLod || agg->getOp() == EOpImageStoreLod)
-#else
- if (agg->isSampling() || agg->getOp() == EOpImageLoad || agg->getOp() == EOpImageStore)
-#endif
resultPrecision = sequence[0]->getAsTyped()->getQualifier().precision;
else if (function.getType().getBasicType() != EbtBool)
resultPrecision = function.getType().getQualifier().precision == EpqNone ?
@@ -1374,7 +1385,9 @@ void TParseContext::computeBuiltinPrecisions(TIntermTyped& node, const TFunction
TIntermNode* TParseContext::handleReturnValue(const TSourceLoc& loc, TIntermTyped* value)
{
+#ifndef GLSLANG_WEB
storage16BitAssignmentCheck(loc, value->getType(), "return");
+#endif
functionReturnsValue = true;
if (currentFunctionType->getBasicType() == EbtVoid) {
@@ -1399,6 +1412,7 @@ TIntermNode* TParseContext::handleReturnValue(const TSourceLoc& loc, TIntermType
// See if the operation is being done in an illegal location.
void TParseContext::checkLocation(const TSourceLoc& loc, TOperator op)
{
+#ifndef GLSLANG_WEB
switch (op) {
case EOpBarrier:
if (language == EShLangTessControl) {
@@ -1410,9 +1424,48 @@ void TParseContext::checkLocation(const TSourceLoc& loc, TOperator op)
error(loc, "tessellation control barrier() cannot be placed after a return from main()", "", "");
}
break;
+ case EOpBeginInvocationInterlock:
+ if (language != EShLangFragment)
+ error(loc, "beginInvocationInterlockARB() must be in a fragment shader", "", "");
+ if (! inMain)
+ error(loc, "beginInvocationInterlockARB() must be in main()", "", "");
+ else if (postEntryPointReturn)
+ error(loc, "beginInvocationInterlockARB() cannot be placed after a return from main()", "", "");
+ if (controlFlowNestingLevel > 0)
+ error(loc, "beginInvocationInterlockARB() cannot be placed within flow control", "", "");
+
+ if (beginInvocationInterlockCount > 0)
+ error(loc, "beginInvocationInterlockARB() must only be called once", "", "");
+ if (endInvocationInterlockCount > 0)
+ error(loc, "beginInvocationInterlockARB() must be called before endInvocationInterlockARB()", "", "");
+
+ beginInvocationInterlockCount++;
+
+ // default to pixel_interlock_ordered
+ if (intermediate.getInterlockOrdering() == EioNone)
+ intermediate.setInterlockOrdering(EioPixelInterlockOrdered);
+ break;
+ case EOpEndInvocationInterlock:
+ if (language != EShLangFragment)
+ error(loc, "endInvocationInterlockARB() must be in a fragment shader", "", "");
+ if (! inMain)
+ error(loc, "endInvocationInterlockARB() must be in main()", "", "");
+ else if (postEntryPointReturn)
+ error(loc, "endInvocationInterlockARB() cannot be placed after a return from main()", "", "");
+ if (controlFlowNestingLevel > 0)
+ error(loc, "endInvocationInterlockARB() cannot be placed within flow control", "", "");
+
+ if (endInvocationInterlockCount > 0)
+ error(loc, "endInvocationInterlockARB() must only be called once", "", "");
+ if (beginInvocationInterlockCount == 0)
+ error(loc, "beginInvocationInterlockARB() must be called before endInvocationInterlockARB()", "", "");
+
+ endInvocationInterlockCount++;
+ break;
default:
break;
}
+#endif
}
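// Editor's note (illustrative): the accepted shape is a single begin/end pair at
// the top level of main():
//     void main() {
//         beginInvocationInterlockARB();
//         // ... critical section ...
//         endInvocationInterlockARB();
//     }
// Calls inside flow control, duplicated calls, or out-of-order pairs hit the errors above.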
// Finish processing object.length(). This started earlier in handleDotDereference(), where
@@ -1430,29 +1483,28 @@ TIntermTyped* TParseContext::handleLengthMethod(const TSourceLoc& loc, TFunction
const TType& type = intermNode->getAsTyped()->getType();
if (type.isArray()) {
if (type.isUnsizedArray()) {
+#ifndef GLSLANG_WEB
if (intermNode->getAsSymbolNode() && isIoResizeArray(type)) {
// We could be between a layout declaration that gives a built-in io array implicit size and
// a user redeclaration of that array, meaning we have to substitute its implicit size here
// without actually redeclaring the array. (It is an error to use a member before the
// redeclaration, but not an error to use the array name itself.)
const TString& name = intermNode->getAsSymbolNode()->getName();
- if (name == "gl_in" || name == "gl_out"
-#ifdef NV_EXTENSIONS
- || name == "gl_MeshVerticesNV"
- || name == "gl_MeshPrimitivesNV"
-#endif
- )
- {
+ if (name == "gl_in" || name == "gl_out" || name == "gl_MeshVerticesNV" ||
+ name == "gl_MeshPrimitivesNV") {
length = getIoArrayImplicitSize(type.getQualifier());
}
}
+#endif
if (length == 0) {
+#ifndef GLSLANG_WEB
if (intermNode->getAsSymbolNode() && isIoResizeArray(type))
error(loc, "", function->getName().c_str(), "array must first be sized by a redeclaration or layout qualifier");
else if (isRuntimeLength(*intermNode->getAsTyped())) {
// Create a unary op and let the back end handle it
return intermediate.addBuiltInFunctionCall(loc, EOpArrayLength, true, intermNode, TType(EbtInt));
} else
+#endif
error(loc, "", function->getName().c_str(), "array must be declared with a size before using this method");
}
} else if (type.getOuterArrayNode()) {
@@ -1485,6 +1537,7 @@ TIntermTyped* TParseContext::handleLengthMethod(const TSourceLoc& loc, TFunction
//
void TParseContext::addInputArgumentConversions(const TFunction& function, TIntermNode*& arguments) const
{
+#ifndef GLSLANG_WEB
TIntermAggregate* aggregate = arguments->getAsAggregate();
// Process each argument's conversion
@@ -1512,6 +1565,7 @@ void TParseContext::addInputArgumentConversions(const TFunction& function, TInte
}
}
}
+#endif
}
//
@@ -1523,6 +1577,9 @@ void TParseContext::addInputArgumentConversions(const TFunction& function, TInte
//
TIntermTyped* TParseContext::addOutputArgumentConversions(const TFunction& function, TIntermAggregate& intermNode) const
{
+#ifdef GLSLANG_WEB
+ return &intermNode;
+#else
TIntermSequence& arguments = intermNode.getSequence();
// Will there be any output conversions?
@@ -1590,6 +1647,7 @@ TIntermTyped* TParseContext::addOutputArgumentConversions(const TFunction& funct
conversionTree = intermediate.setAggregateOperator(conversionTree, EOpComma, intermNode.getType(), intermNode.getLoc());
return conversionTree;
+#endif
}
void TParseContext::memorySemanticsCheck(const TSourceLoc& loc, const TFunction& fnCandidate, const TIntermOperator& callNode)
@@ -1602,6 +1660,7 @@ void TParseContext::memorySemanticsCheck(const TSourceLoc& loc, const TFunction&
const int gl_SemanticsAcquireRelease = 0x8;
const int gl_SemanticsMakeAvailable = 0x2000;
const int gl_SemanticsMakeVisible = 0x4000;
+ const int gl_SemanticsVolatile = 0x8000;
//const int gl_StorageSemanticsNone = 0x0;
const int gl_StorageSemanticsBuffer = 0x40;
@@ -1691,7 +1750,8 @@ void TParseContext::memorySemanticsCheck(const TSourceLoc& loc, const TFunction&
gl_SemanticsRelease |
gl_SemanticsAcquireRelease |
gl_SemanticsMakeAvailable |
- gl_SemanticsMakeVisible))) {
+ gl_SemanticsMakeVisible |
+ gl_SemanticsVolatile))) {
error(loc, "Invalid semantics value", fnCandidate.getName().c_str(), "");
}
if (((storageClassSemantics | storageClassSemantics2) & ~(gl_StorageSemanticsBuffer |
@@ -1743,10 +1803,18 @@ void TParseContext::memorySemanticsCheck(const TSourceLoc& loc, const TFunction&
error(loc, "gl_SemanticsMakeVisible requires gl_SemanticsAcquire or gl_SemanticsAcquireRelease",
fnCandidate.getName().c_str(), "");
}
-
+ if ((semantics & gl_SemanticsVolatile) &&
+ (callNode.getOp() == EOpMemoryBarrier || callNode.getOp() == EOpBarrier)) {
+ error(loc, "gl_SemanticsVolatile must not be used with memoryBarrier or controlBarrier",
+ fnCandidate.getName().c_str(), "");
+ }
+ if ((callNode.getOp() == EOpAtomicCompSwap || callNode.getOp() == EOpImageAtomicCompSwap) &&
+ ((semantics ^ semantics2) & gl_SemanticsVolatile)) {
+ error(loc, "semEqual and semUnequal must either both include gl_SemanticsVolatile or neither",
+ fnCandidate.getName().c_str(), "");
+ }
}
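// Editor's note (illustrative, hypothetical call): an atomicCompSwap(...) whose
// semEqual argument includes gl_SemanticsVolatile but whose semUnequal argument
// does not is rejected by the last check above; both sides must agree.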
-
//
// Do additional checking of built-in function calls that is not caught
// by normal semantic checks on argument type, extension tagging, etc.
@@ -1774,6 +1842,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
TString featureString;
const char* feature = nullptr;
switch (callNode.getOp()) {
+#ifndef GLSLANG_WEB
case EOpTextureGather:
case EOpTextureGatherOffset:
case EOpTextureGatherOffsets:
@@ -1830,7 +1899,6 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
error(loc, "must be a compile-time constant:", feature, "component argument");
}
-#ifdef AMD_EXTENSIONS
bool bias = false;
if (callNode.getOp() == EOpTextureGather)
bias = fnCandidate.getParamCount() > 3;
@@ -1845,12 +1913,8 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
profileRequires(loc, ~EEsProfile, 450, nullptr, feature);
requireExtensions(loc, 1, &E_GL_AMD_texture_gather_bias_lod, feature);
}
-#endif
-
break;
}
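// Editor's note (illustrative): e.g. "textureGather(s, p, comp)" needs "comp" to
// be a compile-time constant under the checks above, and the bias/LOD variants
// additionally gate on GL_AMD_texture_gather_bias_lod.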
-
-#ifdef AMD_EXTENSIONS
case EOpSparseTextureGather:
case EOpSparseTextureGatherOffset:
case EOpSparseTextureGatherOffsets:
@@ -1928,7 +1992,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
int arg = -1;
switch (callNode.getOp()) {
case EOpTextureOffset: arg = 2; break;
- case EOpTextureFetchOffset: arg = (arg0->getType().getSampler().dim != EsdRect) ? 3 : 2; break;
+ case EOpTextureFetchOffset: arg = (arg0->getType().getSampler().isRect()) ? 2 : 3; break;
case EOpTextureProjOffset: arg = 2; break;
case EOpTextureLodOffset: arg = 3; break;
case EOpTextureProjLodOffset: arg = 3; break;
@@ -1941,7 +2005,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
if (arg > 0) {
-#ifdef AMD_EXTENSIONS
+#ifndef GLSLANG_WEB
bool f16ShadowCompare = (*argp)[1]->getAsTyped()->getBasicType() == EbtFloat16 && arg0->getType().getSampler().shadow;
if (f16ShadowCompare)
++arg;
@@ -1961,7 +2025,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
break;
}
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
case EOpTraceNV:
if (!(*argp)[10]->getAsConstantUnion())
error(loc, "argument must be compile-time constant", "payload number", "");
@@ -1970,7 +2034,6 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
if (!(*argp)[1]->getAsConstantUnion())
error(loc, "argument must be compile-time constant", "callable data number", "");
break;
-#endif
case EOpTextureQuerySamples:
case EOpImageQuerySamples:
@@ -1992,12 +2055,12 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
// Make sure the image types have the correct layout() format and correct argument types
const TType& imageType = arg0->getType();
if (imageType.getSampler().type == EbtInt || imageType.getSampler().type == EbtUint) {
- if (imageType.getQualifier().layoutFormat != ElfR32i && imageType.getQualifier().layoutFormat != ElfR32ui)
+ if (imageType.getQualifier().getFormat() != ElfR32i && imageType.getQualifier().getFormat() != ElfR32ui)
error(loc, "only supported on image with format r32i or r32ui", fnCandidate.getName().c_str(), "");
} else {
if (fnCandidate.getName().compare(0, 19, "imageAtomicExchange") != 0)
error(loc, "only supported on integer images", fnCandidate.getName().c_str(), "");
- else if (imageType.getQualifier().layoutFormat != ElfR32f && profile == EEsProfile)
+ else if (imageType.getQualifier().getFormat() != ElfR32f && isEsProfile())
error(loc, "only supported on image with format r32f", fnCandidate.getName().c_str(), "");
}
@@ -2025,13 +2088,9 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
requireExtensions(loc, 1, &E_GL_KHR_memory_scope_semantics, fnCandidate.getName().c_str());
memorySemanticsCheck(loc, fnCandidate, callNode);
} else if (arg0->getType().getBasicType() == EbtInt64 || arg0->getType().getBasicType() == EbtUint64) {
-#ifdef NV_EXTENSIONS
const char* const extensions[2] = { E_GL_NV_shader_atomic_int64,
E_GL_EXT_shader_atomic_int64 };
requireExtensions(loc, 2, extensions, fnCandidate.getName().c_str());
-#else
- requireExtensions(loc, 1, &E_GL_EXT_shader_atomic_int64, fnCandidate.getName().c_str());
-#endif
}
break;
}
@@ -2039,9 +2098,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
case EOpInterpolateAtCentroid:
case EOpInterpolateAtSample:
case EOpInterpolateAtOffset:
-#ifdef AMD_EXTENSIONS
case EOpInterpolateAtVertex:
-#endif
// Make sure the first argument is an interpolant, or an array element of an interpolant
if (arg0->getType().getQualifier().storage != EvqVaryingIn) {
// It might still be an array element.
@@ -2051,13 +2108,12 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
//
// ES and desktop 4.3 and earlier: swizzles may not be used
// desktop 4.4 and later: swizzles may be used
- bool swizzleOkay = (profile != EEsProfile) && (version >= 440);
+ bool swizzleOkay = (!isEsProfile()) && (version >= 440);
const TIntermTyped* base = TIntermediate::findLValueBase(arg0, swizzleOkay);
if (base == nullptr || base->getType().getQualifier().storage != EvqVaryingIn)
error(loc, "first argument must be an interpolant, or interpolant-array element", fnCandidate.getName().c_str(), "");
}
-#ifdef AMD_EXTENSIONS
if (callNode.getOp() == EOpInterpolateAtVertex) {
if (!arg0->getType().getQualifier().isExplicitInterpolation())
error(loc, "argument must be qualified as __explicitInterpAMD in", "interpolant", "");
@@ -2071,8 +2127,6 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
}
}
}
-#endif
-
break;
case EOpEmitStreamVertex:
@@ -2103,9 +2157,12 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
break;
case EOpSubgroupBroadcast:
- // <id> must be an integral constant expression.
- if ((*argp)[1]->getAsConstantUnion() == nullptr)
- error(loc, "argument must be compile-time constant", "id", "");
+ case EOpSubgroupQuadBroadcast:
+ if (spvVersion.spv < EShTargetSpv_1_5) {
+ // <id> must be an integral constant expression.
+ if ((*argp)[1]->getAsConstantUnion() == nullptr)
+ error(loc, "argument must be compile-time constant", "id", "");
+ }
break;
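// Editor's note (illustrative): SPIR-V 1.5 relaxed the broadcast id to be merely
// dynamically uniform, so "subgroupBroadcast(v, i)" with a non-constant "i" is
// accepted only when targeting SPIR-V 1.5 or later, per the version test above.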
case EOpBarrier:
@@ -2115,6 +2172,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
memorySemanticsCheck(loc, fnCandidate, callNode);
}
break;
+#endif
default:
break;
@@ -2132,7 +2190,7 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
const TSampler& sampler = fnCandidate[0].type->getSampler();
const bool isTexture = sampler.isTexture() && !sampler.isCombined();
- const bool isBuffer = sampler.dim == EsdBuffer;
+ const bool isBuffer = sampler.isBuffer();
const bool isFetch = callNode.getOp() == EOpTextureFetch || callNode.getOp() == EOpTextureFetchOffset;
if (isTexture && (!isBuffer || !isFetch))
@@ -2145,13 +2203,39 @@ void TParseContext::builtInOpCheck(const TSourceLoc& loc, const TFunction& fnCan
break;
}
- if (callNode.getOp() > EOpSubgroupGuardStart && callNode.getOp() < EOpSubgroupGuardStop) {
+ if (callNode.isSubgroup()) {
// these require SPIR-V 1.3
if (spvVersion.spv > 0 && spvVersion.spv < EShTargetSpv_1_3)
error(loc, "requires SPIR-V 1.3", "subgroup op", "");
+
+ // Check that, if extended types are being used, the correct extensions are enabled.
+ if (arg0 != nullptr) {
+ const TType& type = arg0->getType();
+ switch (type.getBasicType()) {
+ default:
+ break;
+ case EbtInt8:
+ case EbtUint8:
+ requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int8, type.getCompleteString().c_str());
+ break;
+ case EbtInt16:
+ case EbtUint16:
+ requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int16, type.getCompleteString().c_str());
+ break;
+ case EbtInt64:
+ case EbtUint64:
+ requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_int64, type.getCompleteString().c_str());
+ break;
+ case EbtFloat16:
+ requireExtensions(loc, 1, &E_GL_EXT_shader_subgroup_extended_types_float16, type.getCompleteString().c_str());
+ break;
+ }
+ }
}
}
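// Editor's note (illustrative): e.g. "subgroupAdd(f16vec2(v))" requires
// GL_EXT_shader_subgroup_extended_types_float16 to be enabled, per the
// basic-type switch above.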
+#ifndef GLSLANG_WEB
+
extern bool PureOperatorBuiltins;
// Deprecated! Use PureOperatorBuiltins == true instead, in which case this
@@ -2267,17 +2351,19 @@ void TParseContext::nonOpBuiltInCheck(const TSourceLoc& loc, const TFunction& fn
if (fnCandidate.getName().compare(0, 11, "imageAtomic") == 0) {
const TType& imageType = callNode.getSequence()[0]->getAsTyped()->getType();
if (imageType.getSampler().type == EbtInt || imageType.getSampler().type == EbtUint) {
- if (imageType.getQualifier().layoutFormat != ElfR32i && imageType.getQualifier().layoutFormat != ElfR32ui)
+ if (imageType.getQualifier().getFormat() != ElfR32i && imageType.getQualifier().getFormat() != ElfR32ui)
error(loc, "only supported on image with format r32i or r32ui", fnCandidate.getName().c_str(), "");
} else {
if (fnCandidate.getName().compare(0, 19, "imageAtomicExchange") != 0)
error(loc, "only supported on integer images", fnCandidate.getName().c_str(), "");
- else if (imageType.getQualifier().layoutFormat != ElfR32f && profile == EEsProfile)
+ else if (imageType.getQualifier().getFormat() != ElfR32f && isEsProfile())
error(loc, "only supported on image with format r32f", fnCandidate.getName().c_str(), "");
}
}
}
+#endif
+
//
// Do any extra checking for a user function call.
//
@@ -2425,6 +2511,7 @@ bool TParseContext::lValueErrorCheck(const TSourceLoc& loc, const char* op, TInt
bool errorReturn = false;
switch(binaryNode->getOp()) {
+#ifndef GLSLANG_WEB
case EOpIndexDirect:
case EOpIndexIndirect:
// ... tessellation control shader ...
@@ -2440,10 +2527,8 @@ bool TParseContext::lValueErrorCheck(const TSourceLoc& loc, const char* op, TInt
error(loc, "tessellation-control per-vertex output l-value must be indexed with gl_InvocationID", "[]", "");
}
}
-
- break; // left node is checked by base class
- case EOpIndexDirectStruct:
break; // left node is checked by base class
+#endif
case EOpVectorSwizzle:
errorReturn = lValueErrorCheck(loc, op, binaryNode->getLeft());
if (!errorReturn) {
@@ -2475,8 +2560,7 @@ bool TParseContext::lValueErrorCheck(const TSourceLoc& loc, const char* op, TInt
}
}
- if (binaryNode && binaryNode->getOp() == EOpIndexDirectStruct &&
- binaryNode->getLeft()->getBasicType() == EbtReference)
+ if (binaryNode && binaryNode->getOp() == EOpIndexDirectStruct && binaryNode->getLeft()->isReference())
return false;
// Let the base class check errors
@@ -2499,7 +2583,7 @@ bool TParseContext::lValueErrorCheck(const TSourceLoc& loc, const char* op, TInt
case EvqFragDepth:
intermediate.setDepthReplacing();
// "In addition, it is an error to statically write to gl_FragDepth in the fragment shader."
- if (profile == EEsProfile && intermediate.getEarlyFragmentTests())
+ if (isEsProfile() && intermediate.getEarlyFragmentTests())
message = "can't modify gl_FragDepth if using early_fragment_tests";
break;
@@ -2536,12 +2620,10 @@ void TParseContext::rValueErrorCheck(const TSourceLoc& loc, const char* op, TInt
// Let the base class check errors
TParseContextBase::rValueErrorCheck(loc, op, node);
-#ifdef AMD_EXTENSIONS
TIntermSymbol* symNode = node->getAsSymbolNode();
- if (!(symNode && symNode->getQualifier().writeonly)) // base class checks
- if (symNode && symNode->getQualifier().explicitInterp)
+ if (!(symNode && symNode->getQualifier().isWriteOnly())) // base class checks
+ if (symNode && symNode->getQualifier().isExplicitInterpolation())
error(loc, "can't read from explicitly-interpolated object: ", op, symNode->getName().c_str());
-#endif
}
//
@@ -2587,14 +2669,14 @@ void TParseContext::reservedErrorCheck(const TSourceLoc& loc, const TString& ide
if (builtInName(identifier))
error(loc, "identifiers starting with \"gl_\" are reserved", identifier.c_str(), "");
- // "__" are not supposed to be an error. ES 310 (and desktop) added the clarification:
+ // "__" are not supposed to be an error. ES 300 (and desktop) added the clarification:
+ // "__" are not supposed to be an error. ES 300 (and desktop) added the clarification:
// "In addition, all identifiers containing two consecutive underscores (__) are
// reserved; using such a name does not itself result in an error, but may result
// in undefined behavior."
// however, before that, ES tests required an error.
if (identifier.find("__") != TString::npos) {
- if (profile == EEsProfile && version <= 300)
- error(loc, "identifiers containing consecutive underscores (\"__\") are reserved, and an error if version <= 300", identifier.c_str(), "");
+ if (isEsProfile() && version < 300)
+ error(loc, "identifiers containing consecutive underscores (\"__\") are reserved, and an error if version < 300", identifier.c_str(), "");
else
warn(loc, "identifiers containing consecutive underscores (\"__\") are reserved", identifier.c_str(), "");
}
@@ -2606,7 +2688,7 @@ void TParseContext::reservedErrorCheck(const TSourceLoc& loc, const TString& ide
//
void TParseContext::reservedPpErrorCheck(const TSourceLoc& loc, const char* identifier, const char* op)
{
- // "__" are not supposed to be an error. ES 310 (and desktop) added the clarification:
+ // "__" are not supposed to be an error. ES 300 (and desktop) added the clarification:
// "All macro names containing two consecutive underscores ( __ ) are reserved;
// defining such a name does not itself result in an error, but may result in
// undefined behavior. All macro names prefixed with "GL_" ("GL" followed by a
@@ -2618,14 +2700,14 @@ void TParseContext::reservedPpErrorCheck(const TSourceLoc& loc, const char* iden
else if (strncmp(identifier, "defined", 8) == 0)
ppError(loc, "\"defined\" can't be (un)defined:", op, identifier);
else if (strstr(identifier, "__") != 0) {
- if (profile == EEsProfile && version >= 300 &&
+ if (isEsProfile() && version >= 300 &&
(strcmp(identifier, "__LINE__") == 0 ||
strcmp(identifier, "__FILE__") == 0 ||
strcmp(identifier, "__VERSION__") == 0))
ppError(loc, "predefined names can't be (un)defined:", op, identifier);
else {
- if (profile == EEsProfile && version <= 300)
- ppError(loc, "names containing consecutive underscores are reserved, and an error if version <= 300:", op, identifier);
+ if (isEsProfile() && version < 300)
+ ppError(loc, "names containing consecutive underscores are reserved, and an error if version < 300:", op, identifier);
else
ppWarn(loc, "names containing consecutive underscores are reserved:", op, identifier);
}
@@ -2639,10 +2721,14 @@ void TParseContext::reservedPpErrorCheck(const TSourceLoc& loc, const char* iden
//
bool TParseContext::lineContinuationCheck(const TSourceLoc& loc, bool endOfComment)
{
+#ifdef GLSLANG_WEB
+ return true;
+#endif
+
const char* message = "line continuation";
- bool lineContinuationAllowed = (profile == EEsProfile && version >= 300) ||
- (profile != EEsProfile && (version >= 420 || extensionTurnedOn(E_GL_ARB_shading_language_420pack)));
+ bool lineContinuationAllowed = (isEsProfile() && version >= 300) ||
+ (!isEsProfile() && (version >= 420 || extensionTurnedOn(E_GL_ARB_shading_language_420pack)));
if (endOfComment) {
if (lineContinuationAllowed)
@@ -2691,10 +2777,27 @@ bool TParseContext::builtInName(const TString& identifier)
//
bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, TFunction& function, TOperator op, TType& type)
{
- type.shallowCopy(function.getType());
+ // See if the constructor does not establish the main type, only requalifies
+ // it, in which case the type comes from the argument instead of from the
+ // constructor function.
+ switch (op) {
+#ifndef GLSLANG_WEB
+ case EOpConstructNonuniform:
+ if (node != nullptr && node->getAsTyped() != nullptr) {
+ type.shallowCopy(node->getAsTyped()->getType());
+ type.getQualifier().makeTemporary();
+ type.getQualifier().nonUniform = true;
+ }
+ break;
+#endif
+ default:
+ type.shallowCopy(function.getType());
+ break;
+ }
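+ // Editor's note (illustrative): e.g. "nonuniformEXT(idx)" keeps the operand's
+ // type and merely adds the nonuniform qualifier, unlike an ordinary constructor
+ // such as "float(x)", whose result type comes from the constructor itself.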
+ // See if it's a matrix
bool constructingMatrix = false;
- switch(op) {
+ switch (op) {
case EOpConstructTextureSampler:
return constructorTextureSamplerError(loc, function);
case EOpConstructMat2x2:
@@ -2706,6 +2809,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
case EOpConstructMat4x2:
case EOpConstructMat4x3:
case EOpConstructMat4x4:
+#ifndef GLSLANG_WEB
case EOpConstructDMat2x2:
case EOpConstructDMat2x3:
case EOpConstructDMat2x4:
@@ -2724,6 +2828,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
case EOpConstructF16Mat4x2:
case EOpConstructF16Mat4x3:
case EOpConstructF16Mat4x4:
+#endif
constructingMatrix = true;
break;
default:
@@ -2773,20 +2878,21 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
if (function[arg].type->isFloatingDomain())
floatArgument = true;
if (type.isStruct()) {
- if (function[arg].type->containsBasicType(EbtFloat16)) {
+ if (function[arg].type->contains16BitFloat()) {
requireFloat16Arithmetic(loc, "constructor", "can't construct structure containing 16-bit type");
}
- if (function[arg].type->containsBasicType(EbtUint16) ||
- function[arg].type->containsBasicType(EbtInt16)) {
+ if (function[arg].type->contains16BitInt()) {
requireInt16Arithmetic(loc, "constructor", "can't construct structure containing 16-bit type");
}
- if (function[arg].type->containsBasicType(EbtUint8) ||
- function[arg].type->containsBasicType(EbtInt8)) {
+ if (function[arg].type->contains8BitInt()) {
requireInt8Arithmetic(loc, "constructor", "can't construct structure containing 8-bit type");
}
}
}
+ if (op == EOpConstructNonuniform)
+ constType = false;
+#ifndef GLSLANG_WEB
switch (op) {
case EOpConstructFloat16:
case EOpConstructF16Vec2:
@@ -2826,6 +2932,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
default:
break;
}
+#endif
// inherit constness from children
if (constType) {
@@ -2834,17 +2941,24 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
if (specConstType) {
switch (op) {
case EOpConstructInt8:
- case EOpConstructUint8:
- case EOpConstructInt16:
- case EOpConstructUint16:
case EOpConstructInt:
case EOpConstructUint:
- case EOpConstructInt64:
- case EOpConstructUint64:
case EOpConstructBool:
case EOpConstructBVec2:
case EOpConstructBVec3:
case EOpConstructBVec4:
+ case EOpConstructIVec2:
+ case EOpConstructIVec3:
+ case EOpConstructIVec4:
+ case EOpConstructUVec2:
+ case EOpConstructUVec3:
+ case EOpConstructUVec4:
+#ifndef GLSLANG_WEB
+ case EOpConstructUint8:
+ case EOpConstructInt16:
+ case EOpConstructUint16:
+ case EOpConstructInt64:
+ case EOpConstructUint64:
case EOpConstructI8Vec2:
case EOpConstructI8Vec3:
case EOpConstructI8Vec4:
@@ -2857,18 +2971,13 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
case EOpConstructU16Vec2:
case EOpConstructU16Vec3:
case EOpConstructU16Vec4:
- case EOpConstructIVec2:
- case EOpConstructIVec3:
- case EOpConstructIVec4:
- case EOpConstructUVec2:
- case EOpConstructUVec3:
- case EOpConstructUVec4:
case EOpConstructI64Vec2:
case EOpConstructI64Vec3:
case EOpConstructI64Vec4:
case EOpConstructU64Vec2:
case EOpConstructU64Vec3:
case EOpConstructU64Vec4:
+#endif
// This was the list of valid ones, if they aren't converting from float
// and aren't making an array.
makeSpecConst = ! floatArgument && ! type.isArray();
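// Editor's note (illustrative): given "layout(constant_id = 0) const uint W = 8;",
// a conversion like "int(W)" remains a specialization constant, while converting
// from a float argument or constructing an array decays to an ordinary constant.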
@@ -2978,7 +3087,7 @@ bool TParseContext::constructorError(const TSourceLoc& loc, TIntermNode* node, T
error(loc, "cannot convert a sampler", "constructor", "");
return true;
}
- if (op != EOpConstructStruct && typed->getBasicType() == EbtAtomicUint) {
+ if (op != EOpConstructStruct && typed->isAtomic()) {
error(loc, "cannot convert an atomic_uint", "constructor", "");
return true;
}
@@ -3024,7 +3133,7 @@ bool TParseContext::constructorTextureSamplerError(const TSourceLoc& loc, const
}
// simulate the first argument's impact on the result type, so it can be compared with the encapsulated operator!=()
TSampler texture = function.getType().getSampler();
- texture.combined = false;
+ texture.setCombined(false);
texture.shadow = false;
if (texture != function[0].type->getSampler()) {
error(loc, "sampler-constructor first argument must match type and dimensionality of constructor type", token, "");
@@ -3076,14 +3185,14 @@ void TParseContext::samplerCheck(const TSourceLoc& loc, const TType& type, const
{
// Check that the appropriate extension is enabled if external sampler is used.
// There are two extensions. The correct one must be used based on GLSL version.
- if (type.getBasicType() == EbtSampler && type.getSampler().external) {
+ if (type.getBasicType() == EbtSampler && type.getSampler().isExternal()) {
if (version < 300) {
requireExtensions(loc, 1, &E_GL_OES_EGL_image_external, "samplerExternalOES");
} else {
requireExtensions(loc, 1, &E_GL_OES_EGL_image_external_essl3, "samplerExternalOES");
}
}
- if (type.getSampler().yuv) {
+ if (type.getSampler().isYuv()) {
requireExtensions(loc, 1, &E_GL_EXT_YUV_target, "__samplerExternal2DY2YEXT");
}
@@ -3100,6 +3209,8 @@ void TParseContext::samplerCheck(const TSourceLoc& loc, const TType& type, const
}
}
+#ifndef GLSLANG_WEB
+
void TParseContext::atomicUintCheck(const TSourceLoc& loc, const TType& type, const TString& identifier)
{
if (type.getQualifier().storage == EvqUniform)
@@ -3110,7 +3221,7 @@ void TParseContext::atomicUintCheck(const TSourceLoc& loc, const TType& type, co
else if (type.getBasicType() == EbtAtomicUint && type.getQualifier().storage != EvqUniform)
error(loc, "atomic_uints can only be used in uniform variables or function parameters:", type.getBasicTypeString().c_str(), identifier.c_str());
}
-#ifdef NV_EXTENSIONS
+
void TParseContext::accStructNVCheck(const TSourceLoc& loc, const TType& type, const TString& identifier)
{
if (type.getQualifier().storage == EvqUniform)
@@ -3123,7 +3234,8 @@ void TParseContext::accStructNVCheck(const TSourceLoc& loc, const TType& type, c
type.getBasicTypeString().c_str(), identifier.c_str());
}
-#endif
+
+#endif // GLSLANG_WEB
void TParseContext::transparentOpaqueCheck(const TSourceLoc& loc, const TType& type, const TString& identifier)
{
@@ -3188,7 +3300,7 @@ void TParseContext::globalQualifierFixCheck(const TSourceLoc& loc, TQualifier& q
break;
}
- if (!nonuniformOkay && qualifier.nonUniform)
+ if (!nonuniformOkay && qualifier.isNonUniform())
error(loc, "for non-parameter, can only apply to 'in' or no storage qualifier", "nonuniformEXT", "");
invariantCheck(loc, qualifier);
@@ -3202,7 +3314,7 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
if (! symbolTable.atGlobalLevel())
return;
- if (!(publicType.userDef && publicType.userDef->getBasicType() == EbtReference)) {
+ if (!(publicType.userDef && publicType.userDef->isReference())) {
if (qualifier.isMemoryQualifierImageAndSSBOOnly() && ! publicType.isImage() && publicType.qualifier.storage != EvqBuffer) {
error(loc, "memory qualifiers cannot be used on this type", "", "");
} else if (qualifier.isMemory() && (publicType.basicType != EbtSampler) && !publicType.qualifier.isUniformOrBuffer()) {
@@ -3212,13 +3324,13 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
if (qualifier.storage == EvqBuffer &&
publicType.basicType != EbtBlock &&
- !qualifier.layoutBufferReference)
+ !qualifier.hasBufferReference())
error(loc, "buffers can be declared only as blocks", "buffer", "");
if (qualifier.storage != EvqVaryingIn && qualifier.storage != EvqVaryingOut)
return;
- if (publicType.shaderQualifiers.blendEquation)
+ if (publicType.shaderQualifiers.hasBlendEquation())
error(loc, "can only be applied to a standalone 'out'", "blend equation", "");
// now, knowing it is a shader in/out, do all the in/out semantic checks
@@ -3231,25 +3343,15 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
if (isTypeInt(publicType.basicType) || publicType.basicType == EbtDouble)
profileRequires(loc, EEsProfile, 300, nullptr, "shader input/output");
- if (!qualifier.flat
-#ifdef AMD_EXTENSIONS
- && !qualifier.explicitInterp
-#endif
-#ifdef NV_EXTENSIONS
- && !qualifier.pervertexNV
-#endif
- ) {
+ if (!qualifier.flat && !qualifier.isExplicitInterpolation() && !qualifier.isPervertexNV()) {
if (isTypeInt(publicType.basicType) ||
publicType.basicType == EbtDouble ||
- (publicType.userDef && (publicType.userDef->containsBasicType(EbtInt8) ||
- publicType.userDef->containsBasicType(EbtUint8) ||
- publicType.userDef->containsBasicType(EbtInt16) ||
- publicType.userDef->containsBasicType(EbtUint16) ||
- publicType.userDef->containsBasicType(EbtInt) ||
- publicType.userDef->containsBasicType(EbtUint) ||
- publicType.userDef->containsBasicType(EbtInt64) ||
- publicType.userDef->containsBasicType(EbtUint64) ||
- publicType.userDef->containsBasicType(EbtDouble)))) {
+ (publicType.userDef && ( publicType.userDef->containsBasicType(EbtInt)
+ || publicType.userDef->containsBasicType(EbtUint)
+ || publicType.userDef->contains16BitInt()
+ || publicType.userDef->contains8BitInt()
+ || publicType.userDef->contains64BitInt()
+ || publicType.userDef->containsDouble()))) {
if (qualifier.storage == EvqVaryingIn && language == EShLangFragment)
error(loc, "must be qualified as flat", TType::getBasicString(publicType.basicType), GetStorageQualifierString(qualifier.storage));
else if (qualifier.storage == EvqVaryingOut && language == EShLangVertex && version == 300)
@@ -3257,13 +3359,11 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
}
}
- if (qualifier.patch && qualifier.isInterpolation())
+ if (qualifier.isPatch() && qualifier.isInterpolation())
error(loc, "cannot use interpolation qualifiers with patch", "patch", "");
-#ifdef NV_EXTENSIONS
- if (qualifier.perTaskNV && publicType.basicType != EbtBlock)
+ if (qualifier.isTaskMemory() && publicType.basicType != EbtBlock)
error(loc, "taskNV variables can be declared only as blocks", "taskNV", "");
-#endif
if (qualifier.storage == EvqVaryingIn) {
switch (language) {
@@ -3281,18 +3381,6 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
if (qualifier.isAuxiliary() || qualifier.isInterpolation() || qualifier.isMemory() || qualifier.invariant)
error(loc, "vertex input cannot be further qualified", "", "");
break;
-
- case EShLangTessControl:
- if (qualifier.patch)
- error(loc, "can only use on output in tessellation-control shader", "patch", "");
- break;
-
- case EShLangTessEvaluation:
- break;
-
- case EShLangGeometry:
- break;
-
case EShLangFragment:
if (publicType.userDef) {
profileRequires(loc, EEsProfile, 300, nullptr, "fragment-shader struct input");
@@ -3303,12 +3391,16 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
requireProfile(loc, ~EEsProfile, "fragment-shader struct input containing an array");
}
break;
-
- case EShLangCompute:
+ case EShLangCompute:
if (! symbolTable.atBuiltInLevel())
error(loc, "global storage input qualifier cannot be used in a compute shader", "in", "");
break;
-
+#ifndef GLSLANG_WEB
+ case EShLangTessControl:
+ if (qualifier.patch)
+ error(loc, "can only use on output in tessellation-control shader", "patch", "");
+ break;
+#endif
default:
break;
}
@@ -3326,18 +3418,6 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
}
break;
-
- case EShLangTessControl:
- break;
-
- case EShLangTessEvaluation:
- if (qualifier.patch)
- error(loc, "can only use on input in tessellation-evaluation shader", "patch", "");
- break;
-
- case EShLangGeometry:
- break;
-
case EShLangFragment:
profileRequires(loc, EEsProfile, 300, nullptr, "fragment shader output");
if (publicType.basicType == EbtStruct) {
@@ -3359,7 +3439,12 @@ void TParseContext::globalQualifierTypeCheck(const TSourceLoc& loc, const TQuali
case EShLangCompute:
error(loc, "global storage output qualifier cannot be used in a compute shader", "out", "");
break;
-
+#ifndef GLSLANG_WEB
+ case EShLangTessEvaluation:
+ if (qualifier.patch)
+ error(loc, "can only use on input in tessellation-evaluation shader", "patch", "");
+ break;
+#endif
default:
break;
}
@@ -3383,18 +3468,14 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons
// Multiple interpolation qualifiers (mostly done later by 'individual qualifiers')
if (src.isInterpolation() && dst.isInterpolation())
-#ifdef AMD_EXTENSIONS
error(loc, "can only have one interpolation qualifier (flat, smooth, noperspective, __explicitInterpAMD)", "", "");
-#else
- error(loc, "can only have one interpolation qualifier (flat, smooth, noperspective)", "", "");
-#endif
// Ordering
- if (! force && ((profile != EEsProfile && version < 420) ||
- (profile == EEsProfile && version < 310))
+ if (! force && ((!isEsProfile() && version < 420) ||
+ (isEsProfile() && version < 310))
&& ! extensionTurnedOn(E_GL_ARB_shading_language_420pack)) {
// non-function parameters
- if (src.noContraction && (dst.invariant || dst.isInterpolation() || dst.isAuxiliary() || dst.storage != EvqTemporary || dst.precision != EpqNone))
+ if (src.isNoContraction() && (dst.invariant || dst.isInterpolation() || dst.isAuxiliary() || dst.storage != EvqTemporary || dst.precision != EpqNone))
error(loc, "precise qualifier must appear first", "", "");
if (src.invariant && (dst.isInterpolation() || dst.isAuxiliary() || dst.storage != EvqTemporary || dst.precision != EpqNone))
error(loc, "invariant qualifier must appear before interpolation, storage, and precision qualifiers ", "", "");
@@ -3406,7 +3487,7 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons
error(loc, "precision qualifier must appear as last qualifier", "", "");
// function parameters
- if (src.noContraction && (dst.storage == EvqConst || dst.storage == EvqIn || dst.storage == EvqOut))
+ if (src.isNoContraction() && (dst.storage == EvqConst || dst.storage == EvqIn || dst.storage == EvqOut))
error(loc, "precise qualifier must appear first", "", "");
if (src.storage == EvqConst && (dst.storage == EvqIn || dst.storage == EvqOut))
error(loc, "in/out must appear before const", "", "");
@@ -3431,6 +3512,7 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons
if (dst.precision == EpqNone || (force && src.precision != EpqNone))
dst.precision = src.precision;
+#ifndef GLSLANG_WEB
if (!force && ((src.coherent && (dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.subgroupcoherent)) ||
(src.devicecoherent && (dst.coherent || dst.queuefamilycoherent || dst.workgroupcoherent || dst.subgroupcoherent)) ||
(src.queuefamilycoherent && (dst.coherent || dst.devicecoherent || dst.workgroupcoherent || dst.subgroupcoherent)) ||
@@ -3438,6 +3520,7 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons
(src.subgroupcoherent && (dst.coherent || dst.devicecoherent || dst.queuefamilycoherent || dst.workgroupcoherent)))) {
error(loc, "only one coherent/devicecoherent/queuefamilycoherent/workgroupcoherent/subgroupcoherent qualifier allowed", GetPrecisionQualifierString(src.precision), "");
}
+#endif
// Layout qualifiers
mergeObjectLayoutQualifiers(dst, src, false);
@@ -3445,19 +3528,17 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons
bool repeated = false;
#define MERGE_SINGLETON(field) repeated |= dst.field && src.field; dst.field |= src.field;
MERGE_SINGLETON(invariant);
- MERGE_SINGLETON(noContraction);
MERGE_SINGLETON(centroid);
MERGE_SINGLETON(smooth);
MERGE_SINGLETON(flat);
+ MERGE_SINGLETON(specConstant);
+#ifndef GLSLANG_WEB
+ MERGE_SINGLETON(noContraction);
MERGE_SINGLETON(nopersp);
-#ifdef AMD_EXTENSIONS
MERGE_SINGLETON(explicitInterp);
-#endif
-#ifdef NV_EXTENSIONS
MERGE_SINGLETON(perPrimitiveNV);
MERGE_SINGLETON(perViewNV);
MERGE_SINGLETON(perTaskNV);
-#endif
MERGE_SINGLETON(patch);
MERGE_SINGLETON(sample);
MERGE_SINGLETON(coherent);
@@ -3470,8 +3551,8 @@ void TParseContext::mergeQualifiers(const TSourceLoc& loc, TQualifier& dst, cons
MERGE_SINGLETON(restrict);
MERGE_SINGLETON(readonly);
MERGE_SINGLETON(writeonly);
- MERGE_SINGLETON(specConstant);
MERGE_SINGLETON(nonUniform);
+#endif
if (repeated)
error(loc, "replicated qualifiers", "", "");
@@ -3514,11 +3595,11 @@ void TParseContext::setDefaultPrecision(const TSourceLoc& loc, TPublicType& publ
// correlates with the declaration of defaultSamplerPrecision[]
int TParseContext::computeSamplerTypeIndex(TSampler& sampler)
{
- int arrayIndex = sampler.arrayed ? 1 : 0;
- int shadowIndex = sampler.shadow ? 1 : 0;
- int externalIndex = sampler.external? 1 : 0;
- int imageIndex = sampler.image ? 1 : 0;
- int msIndex = sampler.ms ? 1 : 0;
+ int arrayIndex = sampler.arrayed ? 1 : 0;
+ int shadowIndex = sampler.shadow ? 1 : 0;
+ int externalIndex = sampler.isExternal() ? 1 : 0;
+ int imageIndex = sampler.isImageClass() ? 1 : 0;
+ int msIndex = sampler.isMultiSample() ? 1 : 0;
int flattened = EsdNumDims * (EbtNumTypes * (2 * (2 * (2 * (2 * arrayIndex + msIndex) + imageIndex) + shadowIndex) +
externalIndex) + sampler.type) + sampler.dim;
@@ -3542,8 +3623,10 @@ void TParseContext::precisionQualifierCheck(const TSourceLoc& loc, TBasicType ba
if (! obeyPrecisionQualifiers() || parsingBuiltins)
return;
+#ifndef GLSLANG_WEB
if (baseType == EbtAtomicUint && qualifier.precision != EpqNone && qualifier.precision != EpqHigh)
error(loc, "atomic counters can only be highp", "atomic_uint", "");
+#endif
if (baseType == EbtFloat || baseType == EbtUint || baseType == EbtInt || baseType == EbtSampler || baseType == EbtAtomicUint) {
if (qualifier.precision == EpqNone) {
@@ -3562,8 +3645,7 @@ void TParseContext::parameterTypeCheck(const TSourceLoc& loc, TStorageQualifier
{
if ((qualifier == EvqOut || qualifier == EvqInOut) && type.isOpaque())
error(loc, "samplers and atomic_uints cannot be output parameters", type.getBasicTypeString().c_str(), "");
-
- if (!parsingBuiltins && type.containsBasicType(EbtFloat16))
+ if (!parsingBuiltins && type.contains16BitFloat())
requireFloat16Arithmetic(loc, type.getBasicTypeString().c_str(), "float16 types can only be in uniform block or buffer storage");
if (!parsingBuiltins && type.contains16BitInt())
requireInt16Arithmetic(loc, type.getBasicTypeString().c_str(), "(u)int16 types can only be in uniform block or buffer storage");
@@ -3726,51 +3808,53 @@ void TParseContext::arraySizesCheck(const TSourceLoc& loc, const TQualifier& qua
(qualifier.storage != EvqTemporary && qualifier.storage != EvqGlobal && qualifier.storage != EvqShared && qualifier.storage != EvqConst))
error(loc, "only outermost dimension of an array of arrays can be a specialization constant", "[]", "");
+#ifndef GLSLANG_WEB
+
// desktop always allows outer-dimension-unsized variable arrays,
- if (profile != EEsProfile)
+ if (!isEsProfile())
return;
// for ES, if size isn't coming from an initializer, it has to be explicitly declared now,
// with very few exceptions
- // last member of ssbo block exception:
- if (qualifier.storage == EvqBuffer && lastMember)
- return;
-
// implicitly-sized io exceptions:
switch (language) {
case EShLangGeometry:
if (qualifier.storage == EvqVaryingIn)
- if ((profile == EEsProfile && version >= 320) ||
+ if ((isEsProfile() && version >= 320) ||
extensionsTurnedOn(Num_AEP_geometry_shader, AEP_geometry_shader))
return;
break;
case EShLangTessControl:
if ( qualifier.storage == EvqVaryingIn ||
- (qualifier.storage == EvqVaryingOut && ! qualifier.patch))
- if ((profile == EEsProfile && version >= 320) ||
+ (qualifier.storage == EvqVaryingOut && ! qualifier.isPatch()))
+ if ((isEsProfile() && version >= 320) ||
extensionsTurnedOn(Num_AEP_tessellation_shader, AEP_tessellation_shader))
return;
break;
case EShLangTessEvaluation:
- if ((qualifier.storage == EvqVaryingIn && ! qualifier.patch) ||
+ if ((qualifier.storage == EvqVaryingIn && ! qualifier.isPatch()) ||
qualifier.storage == EvqVaryingOut)
- if ((profile == EEsProfile && version >= 320) ||
+ if ((isEsProfile() && version >= 320) ||
extensionsTurnedOn(Num_AEP_tessellation_shader, AEP_tessellation_shader))
return;
break;
-#ifdef NV_EXTENSIONS
case EShLangMeshNV:
if (qualifier.storage == EvqVaryingOut)
- if ((profile == EEsProfile && version >= 320) ||
+ if ((isEsProfile() && version >= 320) ||
extensionTurnedOn(E_GL_NV_mesh_shader))
return;
break;
-#endif
default:
break;
}
+#endif
+
+ // last member of ssbo block exception:
+ if (qualifier.storage == EvqBuffer && lastMember)
+ return;
+
arraySizeRequiredCheck(loc, *arraySizes);
}
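// Editor's note (illustrative): e.g. in ES, "buffer B { float data[]; };" is
// accepted because the unsized array is the last SSBO member, while a plain
// "float a[];" at global scope still requires an explicit size or initializer.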
@@ -3811,6 +3895,7 @@ void TParseContext::declareArray(const TSourceLoc& loc, const TString& identifie
if (symbolTable.atGlobalLevel())
trackLinkage(*symbol);
+#ifndef GLSLANG_WEB
if (! symbolTable.atBuiltInLevel()) {
if (isIoResizeArray(type)) {
ioArraySymbolResizeList.push_back(symbol);
@@ -3818,6 +3903,7 @@ void TParseContext::declareArray(const TSourceLoc& loc, const TString& identifie
} else
fixIoArraySize(loc, symbol->getWritableType());
}
+#endif
return;
}
@@ -3855,6 +3941,7 @@ void TParseContext::declareArray(const TSourceLoc& loc, const TString& identifie
return;
}
+#ifndef GLSLANG_WEB
if (existingType.isSizedArray()) {
// be more lenient for input arrays to geometry shaders and tessellation control outputs, where the redeclaration is the same size
if (! (isIoResizeArray(type) && existingType.getOuterArraySize() == type.getOuterArraySize()))
@@ -3868,8 +3955,11 @@ void TParseContext::declareArray(const TSourceLoc& loc, const TString& identifie
if (isIoResizeArray(type))
checkIoArraysConsistency(loc);
+#endif
}
+#ifndef GLSLANG_WEB
+
// Policy and error check for needing a runtime sized array.
void TParseContext::checkRuntimeSizable(const TSourceLoc& loc, const TIntermTyped& base)
{
@@ -3883,7 +3973,7 @@ void TParseContext::checkRuntimeSizable(const TSourceLoc& loc, const TIntermType
const TIntermBinary* binary = base.getAsBinaryNode();
if (binary != nullptr &&
binary->getOp() == EOpIndexDirectStruct &&
- binary->getLeft()->getBasicType() == EbtReference) {
+ binary->getLeft()->isReference()) {
const int index = binary->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst();
const int memberCount = (int)binary->getLeft()->getType().getReferentType()->getStruct()->size();
@@ -3893,8 +3983,8 @@ void TParseContext::checkRuntimeSizable(const TSourceLoc& loc, const TIntermType
}
// check for additional things allowed by GL_EXT_nonuniform_qualifier
- if (base.getBasicType() == EbtSampler ||
- (base.getBasicType() == EbtBlock && base.getType().getQualifier().isUniformOrBuffer()))
+ if (base.getBasicType() == EbtSampler || base.getBasicType() == EbtAccStructNV ||
+ (base.getBasicType() == EbtBlock && base.getType().getQualifier().isUniformOrBuffer()))
requireExtensions(loc, 1, &E_GL_EXT_nonuniform_qualifier, "variable index");
else
error(loc, "", "[", "array must be redeclared with a size before being indexed with a variable");
@@ -3910,7 +4000,7 @@ bool TParseContext::isRuntimeLength(const TIntermTyped& base) const
// is it the last member?
const int index = binary->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst();
- if (binary->getLeft()->getBasicType() == EbtReference)
+ if (binary->getLeft()->isReference())
return false;
const int memberCount = (int)binary->getLeft()->getType().getStruct()->size();
@@ -3922,27 +4012,34 @@ bool TParseContext::isRuntimeLength(const TIntermTyped& base) const
return false;
}
-#ifdef NV_EXTENSIONS
-// Fix mesh view output array dimension
-void TParseContext::resizeMeshViewDimension(const TSourceLoc& loc, TType& type)
+// Check if mesh perviewNV attributes have a view dimension
+// and resize it to gl_MaxMeshViewCountNV when implicitly sized.
+void TParseContext::checkAndResizeMeshViewDim(const TSourceLoc& loc, TType& type, bool isBlockMember)
{
// see if member is a per-view attribute
- if (type.getQualifier().isPerView()) {
- // since we don't have the maxMeshViewCountNV set during parsing builtins, we hardcode the value
+ if (!type.getQualifier().isPerView())
+ return;
+
+ if ((isBlockMember && type.isArray()) || (!isBlockMember && type.isArrayOfArrays())) {
+ // since we don't have the maxMeshViewCountNV set during parsing builtins, we hardcode the value.
int maxViewCount = parsingBuiltins ? 4 : resources.maxMeshViewCountNV;
+ // For block members, outermost array dimension is the view dimension.
+ // For non-block members, outermost array dimension is the vertex/primitive dimension
+ // and 2nd outermost is the view dimension.
+ int viewDim = isBlockMember ? 0 : 1;
+ int viewDimSize = type.getArraySizes()->getDimSize(viewDim);
- if (! type.isArray()) {
- error(loc, "requires an view array dimension", "perviewNV", "");
- }
- else if (!type.isUnsizedArray() && type.getOuterArraySize() != maxViewCount) {
+ if (viewDimSize != UnsizedArraySize && viewDimSize != maxViewCount)
error(loc, "mesh view output array size must be gl_MaxMeshViewCountNV or implicitly sized", "[]", "");
- }
- else if (type.isUnsizedArray()) {
- type.changeOuterArraySize(maxViewCount);
- }
+ else if (viewDimSize == UnsizedArraySize)
+ type.getArraySizes()->setDimSize(viewDim, maxViewCount);
+ }
+ else {
+ error(loc, "requires a view array dimension", "perviewNV", "");
}
}
-#endif
+
+#endif // GLSLANG_WEB
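
A standalone sketch of the dimension rule encoded above (plain C++, not glslang's API; kUnsized stands in for UnsizedArraySize): block members carry the view count in their outermost dimension, while non-block members use the outermost dimension for vertices/primitives and the next one in for views.

    #include <cassert>

    constexpr int kUnsized = -1; // stand-in for UnsizedArraySize

    int viewDimIndex(bool isBlockMember) { return isBlockMember ? 0 : 1; }

    // Implicitly sized view dimensions snap to gl_MaxMeshViewCountNV;
    // explicit sizes must already equal it (checked separately above).
    int resolveViewDimSize(int declared, int maxMeshViewCount) {
        return declared == kUnsized ? maxMeshViewCount : declared;
    }

    int main() {
        assert(viewDimIndex(true) == 0 && viewDimIndex(false) == 1);
        assert(resolveViewDimSize(kUnsized, 4) == 4);
        return 0;
    }
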
// Returns true if the first argument to the #line directive is the line number for the next line.
//
@@ -3955,7 +4052,7 @@ void TParseContext::resizeMeshViewDimension(const TSourceLoc& loc, TType& type)
// source string number source-string-number.
bool TParseContext::lineDirectiveShouldSetNextLine() const
{
- return profile == EEsProfile || version >= 330;
+ return isEsProfile() || version >= 330;
}
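
A sketch of what this predicate controls, using a hypothetical helper name: under ES or desktop 330+, `#line n` applies n to the line that follows; older desktop GLSL applies n to the directive line itself.

    #include <cassert>

    // Hypothetical helper mirroring lineDirectiveShouldSetNextLine():
    // returns the number reported for the line after "#line n".
    int lineAfterDirective(int n, bool directiveSetsNextLine) {
        return directiveSetsNextLine ? n : n + 1;
    }

    int main() {
        assert(lineAfterDirective(42, true) == 42);  // ES, or desktop >= 330
        assert(lineAfterDirective(42, false) == 43); // older desktop rule
        return 0;
    }
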
//
@@ -3986,18 +4083,19 @@ void TParseContext::nonInitConstCheck(const TSourceLoc& loc, TString& identifier
TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TString& identifier,
const TQualifier& qualifier, const TShaderQualifiers& publicType)
{
+#ifndef GLSLANG_WEB
if (! builtInName(identifier) || symbolTable.atBuiltInLevel() || ! symbolTable.atGlobalLevel())
return nullptr;
- bool nonEsRedecls = (profile != EEsProfile && (version >= 130 || identifier == "gl_TexCoord"));
- bool esRedecls = (profile == EEsProfile &&
+ bool nonEsRedecls = (!isEsProfile() && (version >= 130 || identifier == "gl_TexCoord"));
+ bool esRedecls = (isEsProfile() &&
(version >= 320 || extensionsTurnedOn(Num_AEP_shader_io_blocks, AEP_shader_io_blocks)));
if (! esRedecls && ! nonEsRedecls)
return nullptr;
// Special case when using GL_ARB_separate_shader_objects
bool ssoPre150 = false; // means the only reason this variable is redeclared is due to this combination
- if (profile != EEsProfile && version <= 140 && extensionTurnedOn(E_GL_ARB_separate_shader_objects)) {
+ if (!isEsProfile() && version <= 140 && extensionTurnedOn(E_GL_ARB_separate_shader_objects)) {
if (identifier == "gl_Position" ||
identifier == "gl_PointSize" ||
identifier == "gl_ClipVertex" ||
@@ -4020,11 +4118,9 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
(identifier == "gl_Color" && language == EShLangFragment) ||
(identifier == "gl_FragStencilRefARB" && (nonEsRedecls && version >= 140)
&& language == EShLangFragment) ||
-#ifdef NV_EXTENSIONS
identifier == "gl_SampleMask" ||
identifier == "gl_Layer" ||
identifier == "gl_PrimitiveIndicesNV" ||
-#endif
identifier == "gl_TexCoord") {
// Find the existing symbol, if any.
@@ -4104,16 +4200,13 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
}
}
else if (
-#ifdef NV_EXTENSIONS
identifier == "gl_PrimitiveIndicesNV" ||
-#endif
identifier == "gl_FragStencilRefARB") {
if (qualifier.hasLayout())
error(loc, "cannot apply layout qualifier to", "redeclaration", symbol->getName().c_str());
if (qualifier.storage != EvqVaryingOut)
error(loc, "cannot change output storage qualification of", "redeclaration", symbol->getName().c_str());
}
-#ifdef NV_EXTENSIONS
else if (identifier == "gl_SampleMask") {
if (!publicType.layoutOverrideCoverage) {
error(loc, "redeclaration only allowed for override_coverage layout", "redeclaration", symbol->getName().c_str());
@@ -4126,12 +4219,12 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
symbolQualifier.layoutViewportRelative = qualifier.layoutViewportRelative;
symbolQualifier.layoutSecondaryViewportRelativeOffset = qualifier.layoutSecondaryViewportRelativeOffset;
}
-#endif
// TODO: semantics quality: separate smooth from nothing declared, then use IsInterpolation for several tests above
return symbol;
}
+#endif
return nullptr;
}
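
For reference, the shape of redeclaration this function vets; a minimal example in a C++ raw string, not taken from this diff:

    // Hypothetical redeclaration redeclareBuiltinVariable() accepts: adding a
    // layout qualifier to the built-in gl_FragCoord in a fragment shader.
    static const char* kRedeclSrc = R"(
    #version 450
    layout(origin_upper_left) in vec4 gl_FragCoord;
    out vec4 color;
    void main() { color = vec4(gl_FragCoord.xy, 0.0, 1.0); }
    )";

    int main() { (void)kRedeclSrc; return 0; }
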
@@ -4143,16 +4236,13 @@ TSymbol* TParseContext::redeclareBuiltinVariable(const TSourceLoc& loc, const TS
void TParseContext::redeclareBuiltinBlock(const TSourceLoc& loc, TTypeList& newTypeList, const TString& blockName,
const TString* instanceName, TArraySizes* arraySizes)
{
+#ifndef GLSLANG_WEB
const char* feature = "built-in block redeclaration";
profileRequires(loc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, feature);
profileRequires(loc, ~EEsProfile, 410, E_GL_ARB_separate_shader_objects, feature);
- if (blockName != "gl_PerVertex" && blockName != "gl_PerFragment"
-#ifdef NV_EXTENSIONS
- && blockName != "gl_MeshPerVertexNV" && blockName != "gl_MeshPerPrimitiveNV"
-#endif
- )
- {
+ if (blockName != "gl_PerVertex" && blockName != "gl_PerFragment" &&
+ blockName != "gl_MeshPerVertexNV" && blockName != "gl_MeshPerPrimitiveNV") {
error(loc, "cannot redeclare block: ", "block declaration", blockName.c_str());
return;
}
@@ -4211,7 +4301,6 @@ void TParseContext::redeclareBuiltinBlock(const TSourceLoc& loc, TTypeList& newT
TType& type = block->getWritableType();
-#ifdef NV_EXTENSIONS
// if gl_PerVertex is redeclared for the purpose of passing through "gl_Position"
// for passthrough purposes, the redeclared block should have the same qualifiers as

// the current one
@@ -4221,7 +4310,6 @@ void TParseContext::redeclareBuiltinBlock(const TSourceLoc& loc, TTypeList& newT
type.getQualifier().layoutStream = currentBlockQualifier.layoutStream;
type.getQualifier().layoutXfbBuffer = currentBlockQualifier.layoutXfbBuffer;
}
-#endif
TTypeList::iterator member = type.getWritableStruct()->begin();
size_t numOriginalMembersFound = 0;
@@ -4254,7 +4342,6 @@ void TParseContext::redeclareBuiltinBlock(const TSourceLoc& loc, TTypeList& newT
error(memberLoc, "cannot change array size of redeclared block member", member->type->getFieldName().c_str(), "");
else if (! oldType.getQualifier().isPerView() && newType.isArray())
arrayLimitCheck(loc, member->type->getFieldName(), newType.getOuterArraySize());
-#ifdef NV_EXTENSIONS
if (oldType.getQualifier().isPerView() && ! newType.getQualifier().isPerView())
error(memberLoc, "missing perviewNV qualifier to redeclared block member", member->type->getFieldName().c_str(), "");
else if (! oldType.getQualifier().isPerView() && newType.getQualifier().isPerView())
@@ -4274,7 +4361,6 @@ void TParseContext::redeclareBuiltinBlock(const TSourceLoc& loc, TTypeList& newT
error(memberLoc, "missing perprimitiveNV qualifier to redeclared block member", member->type->getFieldName().c_str(), "");
else if (! oldType.getQualifier().isPerPrimitive() && newType.getQualifier().isPerPrimitive())
error(memberLoc, "cannot add perprimitiveNV qualifier to redeclared block member", member->type->getFieldName().c_str(), "");
-#endif
if (newType.getQualifier().isMemory())
error(memberLoc, "cannot add memory qualifier to redeclared block member", member->type->getFieldName().c_str(), "");
if (newType.getQualifier().hasNonXfbLayout())
@@ -4365,6 +4451,7 @@ void TParseContext::redeclareBuiltinBlock(const TSourceLoc& loc, TTypeList& newT
// Save it in the AST for linker use.
trackLinkage(*block);
+#endif // GLSLANG_WEB
}
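
A typical input for this path, shown as a hedged sketch: gl_PerVertex trimmed to the one member the program uses, the common pattern with separate shader objects and SPIR-V.

    // Sketch of a built-in block redeclaration this function validates.
    static const char* kPerVertexSrc = R"(
    #version 450
    out gl_PerVertex {
        vec4 gl_Position;
    };
    void main() { gl_Position = vec4(0.0); }
    )";

    int main() { (void)kPerVertexSrc; return 0; }
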
void TParseContext::paramCheckFixStorage(const TSourceLoc& loc, const TStorageQualifier& qualifier, TType& type)
@@ -4392,6 +4479,7 @@ void TParseContext::paramCheckFixStorage(const TSourceLoc& loc, const TStorageQu
void TParseContext::paramCheckFix(const TSourceLoc& loc, const TQualifier& qualifier, TType& type)
{
+#ifndef GLSLANG_WEB
if (qualifier.isMemory()) {
type.getQualifier().volatil = qualifier.volatil;
type.getQualifier().coherent = qualifier.coherent;
@@ -4404,6 +4492,7 @@ void TParseContext::paramCheckFix(const TSourceLoc& loc, const TQualifier& quali
type.getQualifier().writeonly = qualifier.writeonly;
type.getQualifier().restrict = qualifier.restrict;
}
+#endif
if (qualifier.isAuxiliary() ||
qualifier.isInterpolation())
@@ -4412,9 +4501,9 @@ void TParseContext::paramCheckFix(const TSourceLoc& loc, const TQualifier& quali
error(loc, "cannot use layout qualifiers on a function parameter", "", "");
if (qualifier.invariant)
error(loc, "cannot use invariant qualifier on a function parameter", "", "");
- if (qualifier.noContraction) {
+ if (qualifier.isNoContraction()) {
if (qualifier.isParamOutput())
- type.getQualifier().noContraction = true;
+ type.getQualifier().setNoContraction();
else
warn(loc, "qualifier has no effect on non-output parameters", "precise", "");
}
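
The rule above in shader terms, as a sketch: 'precise' sticks to output parameters and only warns elsewhere.

    // Sketch: 'precise' lands on the out parameter; on an input parameter it
    // would only produce the "no effect" warning emitted above.
    static const char* kPreciseSrc = R"(
    #version 450
    void mad(float a, float b, float c, precise out float r) {
        r = a * b + c; // this expression must not be contracted
    }
    void main() { }
    )";

    int main() { (void)kPreciseSrc; return 0; }
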
@@ -4455,12 +4544,15 @@ void TParseContext::opaqueCheck(const TSourceLoc& loc, const TType& type, const
void TParseContext::referenceCheck(const TSourceLoc& loc, const TType& type, const char* op)
{
+#ifndef GLSLANG_WEB
if (containsFieldWithBasicType(type, EbtReference))
error(loc, "can't use with reference types", op, "");
+#endif
}
void TParseContext::storage16BitAssignmentCheck(const TSourceLoc& loc, const TType& type, const char* op)
{
+#ifndef GLSLANG_WEB
if (type.getBasicType() == EbtStruct && containsFieldWithBasicType(type, EbtFloat16))
requireFloat16Arithmetic(loc, op, "can't use with structs containing float16");
@@ -4490,6 +4582,7 @@ void TParseContext::storage16BitAssignmentCheck(const TSourceLoc& loc, const TTy
if (type.isArray() && type.getBasicType() == EbtUint8)
requireInt8Arithmetic(loc, op, "can't use with arrays containing uint8");
+#endif
}
void TParseContext::specializationCheck(const TSourceLoc& loc, const TType& type, const char* op)
@@ -4541,6 +4634,7 @@ void TParseContext::structTypeCheck(const TSourceLoc& /*loc*/, TPublicType& publ
//
void TParseContext::inductiveLoopCheck(const TSourceLoc& loc, TIntermNode* init, TIntermLoop* loop)
{
+#ifndef GLSLANG_WEB
// loop index init must exist and be a declaration, which shows up in the AST as an aggregate of size 1 of the declaration
bool badInit = false;
if (! init || ! init->getAsAggregate() || init->getAsAggregate()->getSequence().size() != 1)
@@ -4636,8 +4730,10 @@ void TParseContext::inductiveLoopCheck(const TSourceLoc& loc, TIntermNode* init,
// the body
inductiveLoopBodyCheck(loop->getBody(), loopIndex, symbolTable);
+#endif
}
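
The shape of loop this check admits, as a sketch: a single index declared and initialized in the init clause, compared against a constant, and never written in the body.

    // Hypothetical ESSL 1.00 loop that satisfies the inductive-loop rules.
    static const char* kInductiveSrc = R"(
    #version 100
    precision mediump float;
    void main() {
        float sum = 0.0;
        for (int i = 0; i < 4; ++i)
            sum += float(i);
        gl_FragColor = vec4(sum);
    }
    )";

    int main() { (void)kInductiveSrc; return 0; }
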
+#ifndef GLSLANG_WEB
// Do limit checks for built-in arrays.
void TParseContext::arrayLimitCheck(const TSourceLoc& loc, const TString& identifier, int size)
{
@@ -4647,13 +4743,12 @@ void TParseContext::arrayLimitCheck(const TSourceLoc& loc, const TString& identi
limitCheck(loc, size, "gl_MaxClipDistances", "gl_ClipDistance array size");
else if (identifier.compare("gl_CullDistance") == 0)
limitCheck(loc, size, "gl_MaxCullDistances", "gl_CullDistance array size");
-#ifdef NV_EXTENSIONS
else if (identifier.compare("gl_ClipDistancePerViewNV") == 0)
limitCheck(loc, size, "gl_MaxClipDistances", "gl_ClipDistancePerViewNV array size");
else if (identifier.compare("gl_CullDistancePerViewNV") == 0)
limitCheck(loc, size, "gl_MaxCullDistances", "gl_CullDistancePerViewNV array size");
-#endif
}
+#endif // GLSLANG_WEB
// See if the provided value is less than or equal to the symbol indicated by limit,
// which should be a constant in the symbol table.
@@ -4667,6 +4762,8 @@ void TParseContext::limitCheck(const TSourceLoc& loc, int value, const char* lim
error(loc, "must be less than or equal to", feature, "%s (%d)", limit, constArray[0].getIConst());
}
+#ifndef GLSLANG_WEB
+
//
// Do any additional error checking, etc., once we know the parsing is done.
//
@@ -4688,33 +4785,30 @@ void TParseContext::finish()
// about the stage itself.
switch (language) {
case EShLangGeometry:
- if (profile == EEsProfile && version == 310)
+ if (isEsProfile() && version == 310)
requireExtensions(getCurrentLoc(), Num_AEP_geometry_shader, AEP_geometry_shader, "geometry shaders");
break;
case EShLangTessControl:
case EShLangTessEvaluation:
- if (profile == EEsProfile && version == 310)
+ if (isEsProfile() && version == 310)
requireExtensions(getCurrentLoc(), Num_AEP_tessellation_shader, AEP_tessellation_shader, "tessellation shaders");
- else if (profile != EEsProfile && version < 400)
+ else if (!isEsProfile() && version < 400)
requireExtensions(getCurrentLoc(), 1, &E_GL_ARB_tessellation_shader, "tessellation shaders");
break;
case EShLangCompute:
- if (profile != EEsProfile && version < 430)
+ if (!isEsProfile() && version < 430)
requireExtensions(getCurrentLoc(), 1, &E_GL_ARB_compute_shader, "compute shaders");
break;
-#ifdef NV_EXTENSIONS
case EShLangTaskNV:
requireExtensions(getCurrentLoc(), 1, &E_GL_NV_mesh_shader, "task shaders");
break;
case EShLangMeshNV:
requireExtensions(getCurrentLoc(), 1, &E_GL_NV_mesh_shader, "mesh shaders");
break;
-#endif
default:
break;
}
-#ifdef NV_EXTENSIONS
// Set default outputs for GL_NV_geometry_shader_passthrough
if (language == EShLangGeometry && extensionTurnedOn(E_SPV_NV_geometry_shader_passthrough)) {
if (intermediate.getOutputPrimitive() == ElgNone) {
@@ -4734,8 +4828,8 @@ void TParseContext::finish()
}
}
}
-#endif
}
+#endif // GLSLANG_WEB
//
// Layout qualifier stuff.
@@ -4771,6 +4865,7 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
publicType.qualifier.layoutPacking = ElpStd140;
return;
}
+#ifndef GLSLANG_WEB
if (id == TQualifier::getLayoutPackingString(ElpStd430)) {
requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, "std430");
profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, nullptr, "std430");
@@ -4810,20 +4905,12 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
intermediate.setUsePhysicalStorageBuffer();
return;
}
- if (language == EShLangGeometry || language == EShLangTessEvaluation
-#ifdef NV_EXTENSIONS
- || language == EShLangMeshNV
-#endif
- ) {
+ if (language == EShLangGeometry || language == EShLangTessEvaluation || language == EShLangMeshNV) {
if (id == TQualifier::getGeometryString(ElgTriangles)) {
publicType.shaderQualifiers.geometry = ElgTriangles;
return;
}
- if (language == EShLangGeometry
-#ifdef NV_EXTENSIONS
- || language == EShLangMeshNV
-#endif
- ) {
+ if (language == EShLangGeometry || language == EShLangMeshNV) {
if (id == TQualifier::getGeometryString(ElgPoints)) {
publicType.shaderQualifiers.geometry = ElgPoints;
return;
@@ -4832,10 +4919,7 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
publicType.shaderQualifiers.geometry = ElgLines;
return;
}
-#ifdef NV_EXTENSIONS
- if (language == EShLangGeometry)
-#endif
- {
+ if (language == EShLangGeometry) {
if (id == TQualifier::getGeometryString(ElgLineStrip)) {
publicType.shaderQualifiers.geometry = ElgLineStrip;
return;
@@ -4852,14 +4936,12 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
publicType.shaderQualifiers.geometry = ElgTriangleStrip;
return;
}
-#ifdef NV_EXTENSIONS
if (id == "passthrough") {
requireExtensions(loc, 1, &E_SPV_NV_geometry_shader_passthrough, "geometry shader passthrough");
publicType.qualifier.layoutPassthrough = true;
intermediate.setGeoPassthroughEXT();
return;
}
-#endif
}
} else {
assert(language == EShLangTessEvaluation);
@@ -4942,6 +5024,17 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
return;
}
}
+ for (TInterlockOrdering order = (TInterlockOrdering)(EioNone + 1); order < EioCount; order = (TInterlockOrdering)(order+1)) {
+ if (id == TQualifier::getInterlockOrderingString(order)) {
+ requireProfile(loc, ECoreProfile | ECompatibilityProfile, "fragment shader interlock layout qualifier");
+ profileRequires(loc, ECoreProfile | ECompatibilityProfile, 450, nullptr, "fragment shader interlock layout qualifier");
+ requireExtensions(loc, 1, &E_GL_ARB_fragment_shader_interlock, TQualifier::getInterlockOrderingString(order));
+ if (order == EioShadingRateInterlockOrdered || order == EioShadingRateInterlockUnordered)
+ requireExtensions(loc, 1, &E_GL_NV_shading_rate_image, TQualifier::getInterlockOrderingString(order));
+ publicType.shaderQualifiers.interlockOrdering = order;
+ return;
+ }
+ }
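
The orderings matched by the new loop are standalone layout qualifiers from GL_ARB_fragment_shader_interlock; a sketch of a fragment shader using one of them:

    static const char* kInterlockSrc = R"(
    #version 450
    #extension GL_ARB_fragment_shader_interlock : require
    layout(pixel_interlock_ordered) in;
    void main() {
        beginInvocationInterlockARB();
        // critical section: overlapping fragments execute this in order
        endInvocationInterlockARB();
    }
    )";

    int main() { (void)kInterlockSrc; return 0; }
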
if (id.compare(0, 13, "blend_support") == 0) {
bool found = false;
for (TBlendEquationShift be = (TBlendEquationShift)0; be < EBlendCount; be = (TBlendEquationShift)(be + 1)) {
@@ -4958,7 +5051,6 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
error(loc, "unknown blend equation", "blend_support", "");
return;
}
-#ifdef NV_EXTENSIONS
if (id == "override_coverage") {
requireExtensions(loc, 1, &E_GL_NV_sample_mask_override_coverage, "sample mask override coverage");
publicType.shaderQualifiers.layoutOverrideCoverage = true;
@@ -4996,9 +5088,8 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
}
}
}
-#else
- }
#endif
+
error(loc, "unrecognized layout identifier, or qualifier requires assignment (e.g., binding = 4)", id.c_str(), "");
}
@@ -5063,7 +5154,8 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
return;
} else if (id == "location") {
profileRequires(loc, EEsProfile, 300, nullptr, "location");
- const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location };
+ const char* exts[2] = { E_GL_ARB_separate_shader_objects, E_GL_ARB_explicit_attrib_location };
+ // GL_ARB_explicit_uniform_location requires 330 or GL_ARB_explicit_attrib_location, so we do not need to add it here
profileRequires(loc, ~EEsProfile, 330, 2, exts, "location");
if ((unsigned int)value >= TQualifier::layoutLocationEnd)
error(loc, "location is too large", id.c_str(), "");
@@ -5083,8 +5175,10 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
error(loc, "needs a literal integer", "set", "");
return;
} else if (id == "binding") {
+#ifndef GLSLANG_WEB
profileRequires(loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, "binding");
profileRequires(loc, EEsProfile, 310, nullptr, "binding");
+#endif
if ((unsigned int)value >= TQualifier::layoutBindingEnd)
error(loc, "binding is too large", id.c_str(), "");
else
@@ -5092,7 +5186,23 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
if (nonLiteral)
error(loc, "needs a literal integer", "binding", "");
return;
- } else if (id == "component") {
+ }
+ if (id == "constant_id") {
+ requireSpv(loc, "constant_id");
+ if (value >= (int)TQualifier::layoutSpecConstantIdEnd) {
+ error(loc, "specialization-constant id is too large", id.c_str(), "");
+ } else {
+ publicType.qualifier.layoutSpecConstantId = value;
+ publicType.qualifier.specConstant = true;
+ if (! intermediate.addUsedConstantId(value))
+ error(loc, "specialization-constant id already used", id.c_str(), "");
+ }
+ if (nonLiteral)
+ error(loc, "needs a literal integer", "constant_id", "");
+ return;
+ }
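
A sketch of the construct this relocated block handles (SPIR-V targets only, per requireSpv); the id is what addUsedConstantId() records so duplicate ids can be reported:

    static const char* kSpecConstSrc = R"(
    #version 450
    layout(constant_id = 7) const int kSampleCount = 4;
    void main() { }
    )";

    int main() { (void)kSpecConstSrc; return 0; }
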
+#ifndef GLSLANG_WEB
+ if (id == "component") {
requireProfile(loc, ECoreProfile | ECompatibilityProfile, "component");
profileRequires(loc, ECoreProfile | ECompatibilityProfile, 440, E_GL_ARB_enhanced_layouts, "component");
if ((unsigned)value >= TQualifier::layoutComponentEnd)
@@ -5102,7 +5212,8 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
if (nonLiteral)
error(loc, "needs a literal integer", "component", "");
return;
- } else if (id.compare(0, 4, "xfb_") == 0) {
+ }
+ if (id.compare(0, 4, "xfb_") == 0) {
// "Any shader making any static use (after preprocessing) of any of these
// *xfb_* qualifiers will cause the shader to be in a transform feedback
// capturing mode and hence responsible for describing the transform feedback
@@ -5148,7 +5259,6 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
return;
}
}
-
if (id == "input_attachment_index") {
requireVulkan(loc, "input_attachment_index");
if (value >= (int)TQualifier::layoutAttachmentEnd)
@@ -5159,20 +5269,6 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
error(loc, "needs a literal integer", "input_attachment_index", "");
return;
}
- if (id == "constant_id") {
- requireSpv(loc, "constant_id");
- if (value >= (int)TQualifier::layoutSpecConstantIdEnd) {
- error(loc, "specialization-constant id is too large", id.c_str(), "");
- } else {
- publicType.qualifier.layoutSpecConstantId = value;
- publicType.qualifier.specConstant = true;
- if (! intermediate.addUsedConstantId(value))
- error(loc, "specialization-constant id already used", id.c_str(), "");
- }
- if (nonLiteral)
- error(loc, "needs a literal integer", "constant_id", "");
- return;
- }
if (id == "num_views") {
requireExtensions(loc, Num_OVR_multiview_EXTs, OVR_multiview_EXTs, "num_views");
publicType.shaderQualifiers.numViews = value;
@@ -5180,8 +5276,6 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
error(loc, "needs a literal integer", "num_views", "");
return;
}
-
-#if NV_EXTENSIONS
if (language == EShLangVertex ||
language == EShLangTessControl ||
language == EShLangTessEvaluation ||
@@ -5194,7 +5288,6 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
return;
}
}
-#endif
if (id == "buffer_reference_align") {
requireExtensions(loc, 1, &E_GL_EXT_buffer_reference, "buffer_reference_align");
@@ -5206,11 +5299,10 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
error(loc, "needs a literal integer", "buffer_reference_align", "");
return;
}
+#endif
switch (language) {
- case EShLangVertex:
- break;
-
+#ifndef GLSLANG_WEB
case EShLangTessControl:
if (id == "vertices") {
if (value == 0)
@@ -5223,9 +5315,6 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
}
break;
- case EShLangTessEvaluation:
- break;
-
case EShLangGeometry:
if (id == "invocations") {
profileRequires(loc, ECompatibilityProfile | ECoreProfile, 400, nullptr, "invocations");
@@ -5275,7 +5364,6 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
}
break;
-#ifdef NV_EXTENSIONS
case EShLangMeshNV:
if (id == "max_vertices") {
requireExtensions(loc, 1, &E_GL_NV_mesh_shader, "max_vertices");
@@ -5302,16 +5390,14 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
#endif
case EShLangCompute:
if (id.compare(0, 11, "local_size_") == 0) {
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
if (language == EShLangMeshNV || language == EShLangTaskNV) {
requireExtensions(loc, 1, &E_GL_NV_mesh_shader, "gl_WorkGroupSize");
- }
- else
-#endif
- {
+ } else {
profileRequires(loc, EEsProfile, 310, 0, "gl_WorkGroupSize");
profileRequires(loc, ~EEsProfile, 430, E_GL_ARB_compute_shader, "gl_WorkGroupSize");
}
+#endif
if (nonLiteral)
error(loc, "needs a literal integer", "local_size", "");
if (id.size() == 12 && value == 0) {
@@ -5320,14 +5406,17 @@ void TParseContext::setLayoutQualifier(const TSourceLoc& loc, TPublicType& publi
}
if (id == "local_size_x") {
publicType.shaderQualifiers.localSize[0] = value;
+ publicType.shaderQualifiers.localSizeNotDefault[0] = true;
return;
}
if (id == "local_size_y") {
publicType.shaderQualifiers.localSize[1] = value;
+ publicType.shaderQualifiers.localSizeNotDefault[1] = true;
return;
}
if (id == "local_size_z") {
publicType.shaderQualifiers.localSize[2] = value;
+ publicType.shaderQualifiers.localSizeNotDefault[2] = true;
return;
}
if (spvVersion.spv != 0) {
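
A sketch of the local_size declarations handled just above: each axis given explicitly sets the matching new localSizeNotDefault flag, while the omitted z axis stays at its default of 1.

    static const char* kLocalSizeSrc = R"(
    #version 450
    layout(local_size_x = 8, local_size_y = 8) in;
    void main() { }
    )";

    int main() { (void)kLocalSizeSrc; return 0; }
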
@@ -5375,53 +5464,50 @@ void TParseContext::mergeObjectLayoutQualifiers(TQualifier& dst, const TQualifie
if (src.hasPacking())
dst.layoutPacking = src.layoutPacking;
+#ifndef GLSLANG_WEB
if (src.hasStream())
dst.layoutStream = src.layoutStream;
-
if (src.hasFormat())
dst.layoutFormat = src.layoutFormat;
-
if (src.hasXfbBuffer())
dst.layoutXfbBuffer = src.layoutXfbBuffer;
+ if (src.hasBufferReferenceAlign())
+ dst.layoutBufferReferenceAlign = src.layoutBufferReferenceAlign;
+#endif
if (src.hasAlign())
dst.layoutAlign = src.layoutAlign;
- if (src.hasBufferReferenceAlign())
- dst.layoutBufferReferenceAlign = src.layoutBufferReferenceAlign;
-
if (! inheritOnly) {
if (src.hasLocation())
dst.layoutLocation = src.layoutLocation;
- if (src.hasComponent())
- dst.layoutComponent = src.layoutComponent;
- if (src.hasIndex())
- dst.layoutIndex = src.layoutIndex;
-
if (src.hasOffset())
dst.layoutOffset = src.layoutOffset;
-
if (src.hasSet())
dst.layoutSet = src.layoutSet;
if (src.layoutBinding != TQualifier::layoutBindingEnd)
dst.layoutBinding = src.layoutBinding;
+ if (src.hasSpecConstantId())
+ dst.layoutSpecConstantId = src.layoutSpecConstantId;
+
+#ifndef GLSLANG_WEB
+ if (src.hasComponent())
+ dst.layoutComponent = src.layoutComponent;
+ if (src.hasIndex())
+ dst.layoutIndex = src.layoutIndex;
if (src.hasXfbStride())
dst.layoutXfbStride = src.layoutXfbStride;
if (src.hasXfbOffset())
dst.layoutXfbOffset = src.layoutXfbOffset;
if (src.hasAttachment())
dst.layoutAttachment = src.layoutAttachment;
- if (src.hasSpecConstantId())
- dst.layoutSpecConstantId = src.layoutSpecConstantId;
-
if (src.layoutPushConstant)
dst.layoutPushConstant = true;
if (src.layoutBufferReference)
dst.layoutBufferReference = true;
-#ifdef NV_EXTENSIONS
if (src.layoutPassthrough)
dst.layoutPassthrough = true;
if (src.layoutViewportRelative)
@@ -5490,17 +5576,15 @@ void TParseContext::layoutObjectCheck(const TSourceLoc& loc, const TSymbol& symb
if (qualifier.hasPacking())
error(loc, "cannot specify packing on a variable declaration", "layout", "");
// "The offset qualifier can only be used on block members of blocks..."
- if (qualifier.hasOffset() && type.getBasicType() != EbtAtomicUint)
+ if (qualifier.hasOffset() && !type.isAtomic())
error(loc, "cannot specify on a variable declaration", "offset", "");
// "The align qualifier can only be used on blocks or block members..."
if (qualifier.hasAlign())
error(loc, "cannot specify on a variable declaration", "align", "");
- if (qualifier.layoutPushConstant)
+ if (qualifier.isPushConstant())
error(loc, "can only specify on a uniform block", "push_constant", "");
-#ifdef NV_EXTENSIONS
- if (qualifier.layoutShaderRecordNV)
+ if (qualifier.isShaderRecordNV())
error(loc, "can only specify on a buffer block", "shaderRecordNV", "");
-#endif
}
break;
default:
@@ -5564,17 +5648,15 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type)
case EvqVaryingOut:
if (type.getBasicType() == EbtBlock)
profileRequires(loc, ECoreProfile | ECompatibilityProfile, 440, E_GL_ARB_enhanced_layouts, "location qualifier on in/out block");
-#ifdef NV_EXTENSIONS
if (type.getQualifier().isTaskMemory())
error(loc, "cannot apply to taskNV in/out blocks", "location", "");
-#endif
break;
case EvqUniform:
case EvqBuffer:
if (type.getBasicType() == EbtBlock)
error(loc, "cannot apply to uniform or buffer block", "location", "");
break;
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
case EvqPayloadNV:
case EvqPayloadInNV:
case EvqHitAttrNV:
@@ -5597,6 +5679,7 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type)
error(loc, "fragment outputs sharing the same location must be the same basic type", "location", "%d", repeated);
}
+#ifndef GLSLANG_WEB
if (qualifier.hasXfbOffset() && qualifier.hasXfbBuffer()) {
int repeated = intermediate.addXfbBufferOffset(type);
if (repeated >= 0)
@@ -5608,25 +5691,20 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type)
if ((type.containsBasicType(EbtDouble) || type.containsBasicType(EbtInt64) || type.containsBasicType(EbtUint64)) &&
! IsMultipleOfPow2(qualifier.layoutXfbOffset, 8))
error(loc, "type contains double or 64-bit integer; xfb_offset must be a multiple of 8", "xfb_offset", "");
-#ifdef AMD_EXTENSIONS
else if ((type.containsBasicType(EbtBool) || type.containsBasicType(EbtFloat) ||
type.containsBasicType(EbtInt) || type.containsBasicType(EbtUint)) &&
! IsMultipleOfPow2(qualifier.layoutXfbOffset, 4))
error(loc, "must be a multiple of size of first component", "xfb_offset", "");
// ..., if applied to an aggregate containing a half float or 16-bit integer, the offset must also be a multiple of 2..."
- else if ((type.containsBasicType(EbtFloat16) || type.containsBasicType(EbtInt16) || type.containsBasicType(EbtUint16)) &&
+ else if ((type.contains16BitFloat() || type.containsBasicType(EbtInt16) || type.containsBasicType(EbtUint16)) &&
!IsMultipleOfPow2(qualifier.layoutXfbOffset, 2))
error(loc, "type contains half float or 16-bit integer; xfb_offset must be a multiple of 2", "xfb_offset", "");
-#else
- else if (! IsMultipleOfPow2(qualifier.layoutXfbOffset, 4))
- error(loc, "must be a multiple of size of first component", "xfb_offset", "");
-#endif
}
-
if (qualifier.hasXfbStride() && qualifier.hasXfbBuffer()) {
if (! intermediate.setXfbBufferStride(qualifier.layoutXfbBuffer, qualifier.layoutXfbStride))
error(loc, "all stride settings must match for xfb buffer", "xfb_stride", "%d", qualifier.layoutXfbBuffer);
}
+#endif
if (qualifier.hasBinding()) {
// Binding checking, from the spec:
@@ -5649,15 +5727,19 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type)
lastBinding += type.getCumulativeArraySize();
else {
lastBinding += 1;
+#ifndef GLSLANG_WEB
if (spvVersion.vulkan == 0)
warn(loc, "assuming binding count of one for compile-time checking of binding numbers for unsized array", "[]", "");
+#endif
}
}
}
+#ifndef GLSLANG_WEB
if (spvVersion.vulkan == 0 && lastBinding >= resources.maxCombinedTextureImageUnits)
error(loc, "sampler binding not less than gl_MaxCombinedTextureImageUnits", "binding", type.isArray() ? "(using array)" : "");
+#endif
}
- if (type.getBasicType() == EbtAtomicUint) {
+ if (type.isAtomic()) {
if (qualifier.layoutBinding >= (unsigned int)resources.maxAtomicCounterBindings) {
error(loc, "atomic_uint binding is too large; see gl_MaxAtomicCounterBindings", "binding", "");
return;
@@ -5667,18 +5749,16 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type)
// some types require bindings
// atomic_uint
- if (type.getBasicType() == EbtAtomicUint)
+ if (type.isAtomic())
error(loc, "layout(binding=X) is required", "atomic_uint", "");
// SPIR-V
if (spvVersion.spv > 0) {
if (qualifier.isUniformOrBuffer()) {
- if (type.getBasicType() == EbtBlock && !qualifier.layoutPushConstant &&
-#ifdef NV_EXTENSIONS
- !qualifier.layoutShaderRecordNV &&
-#endif
- !qualifier.layoutAttachment &&
- !qualifier.layoutBufferReference)
+ if (type.getBasicType() == EbtBlock && !qualifier.isPushConstant() &&
+ !qualifier.isShaderRecordNV() &&
+ !qualifier.hasAttachment() &&
+ !qualifier.hasBufferReference())
error(loc, "uniform/buffer blocks require layout(binding=X)", "binding", "");
else if (spvVersion.vulkan > 0 && type.getBasicType() == EbtSampler)
error(loc, "sampler/texture/image requires layout(binding=X)", "binding", "");
@@ -5703,40 +5783,38 @@ void TParseContext::layoutTypeCheck(const TSourceLoc& loc, const TType& type)
// Image format
if (qualifier.hasFormat()) {
if (! type.isImage())
- error(loc, "only apply to images", TQualifier::getLayoutFormatString(qualifier.layoutFormat), "");
+ error(loc, "only apply to images", TQualifier::getLayoutFormatString(qualifier.getFormat()), "");
else {
- if (type.getSampler().type == EbtFloat && qualifier.layoutFormat > ElfFloatGuard)
- error(loc, "does not apply to floating point images", TQualifier::getLayoutFormatString(qualifier.layoutFormat), "");
- if (type.getSampler().type == EbtInt && (qualifier.layoutFormat < ElfFloatGuard || qualifier.layoutFormat > ElfIntGuard))
- error(loc, "does not apply to signed integer images", TQualifier::getLayoutFormatString(qualifier.layoutFormat), "");
- if (type.getSampler().type == EbtUint && qualifier.layoutFormat < ElfIntGuard)
- error(loc, "does not apply to unsigned integer images", TQualifier::getLayoutFormatString(qualifier.layoutFormat), "");
-
- if (profile == EEsProfile) {
+ if (type.getSampler().type == EbtFloat && qualifier.getFormat() > ElfFloatGuard)
+ error(loc, "does not apply to floating point images", TQualifier::getLayoutFormatString(qualifier.getFormat()), "");
+ if (type.getSampler().type == EbtInt && (qualifier.getFormat() < ElfFloatGuard || qualifier.getFormat() > ElfIntGuard))
+ error(loc, "does not apply to signed integer images", TQualifier::getLayoutFormatString(qualifier.getFormat()), "");
+ if (type.getSampler().type == EbtUint && qualifier.getFormat() < ElfIntGuard)
+ error(loc, "does not apply to unsigned integer images", TQualifier::getLayoutFormatString(qualifier.getFormat()), "");
+
+ if (isEsProfile()) {
// "Except for image variables qualified with the format qualifiers r32f, r32i, and r32ui, image variables must
// specify either memory qualifier readonly or the memory qualifier writeonly."
- if (! (qualifier.layoutFormat == ElfR32f || qualifier.layoutFormat == ElfR32i || qualifier.layoutFormat == ElfR32ui)) {
- if (! qualifier.readonly && ! qualifier.writeonly)
- error(loc, "format requires readonly or writeonly memory qualifier", TQualifier::getLayoutFormatString(qualifier.layoutFormat), "");
+ if (! (qualifier.getFormat() == ElfR32f || qualifier.getFormat() == ElfR32i || qualifier.getFormat() == ElfR32ui)) {
+ if (! qualifier.isReadOnly() && ! qualifier.isWriteOnly())
+ error(loc, "format requires readonly or writeonly memory qualifier", TQualifier::getLayoutFormatString(qualifier.getFormat()), "");
}
}
}
- } else if (type.isImage() && ! qualifier.writeonly) {
+ } else if (type.isImage() && ! qualifier.isWriteOnly()) {
const char *explanation = "image variables not declared 'writeonly' and without a format layout qualifier";
requireProfile(loc, ECoreProfile | ECompatibilityProfile, explanation);
profileRequires(loc, ECoreProfile | ECompatibilityProfile, 0, E_GL_EXT_shader_image_load_formatted, explanation);
}
- if (qualifier.layoutPushConstant && type.getBasicType() != EbtBlock)
+ if (qualifier.isPushConstant() && type.getBasicType() != EbtBlock)
error(loc, "can only be used with a block", "push_constant", "");
- if (qualifier.layoutBufferReference && type.getBasicType() != EbtBlock)
+ if (qualifier.hasBufferReference() && type.getBasicType() != EbtBlock)
error(loc, "can only be used with a block", "buffer_reference", "");
-#ifdef NV_EXTENSIONS
- if (qualifier.layoutShaderRecordNV && type.getBasicType() != EbtBlock)
+ if (qualifier.isShaderRecordNV() && type.getBasicType() != EbtBlock)
error(loc, "can only be used with a block", "shaderRecordNV", "");
-#endif
// input attachment
if (type.isSubpass()) {
@@ -5793,10 +5871,11 @@ void TParseContext::layoutQualifierCheck(const TSourceLoc& loc, const TQualifier
// output block declarations, and output block member declarations."
switch (qualifier.storage) {
+#ifndef GLSLANG_WEB
case EvqVaryingIn:
{
const char* feature = "location qualifier on input";
- if (profile == EEsProfile && version < 310)
+ if (isEsProfile() && version < 310)
requireStage(loc, EShLangVertex, feature);
else
requireStage(loc, (EShLanguageMask)~EShLangComputeMask, feature);
@@ -5813,7 +5892,7 @@ void TParseContext::layoutQualifierCheck(const TSourceLoc& loc, const TQualifier
case EvqVaryingOut:
{
const char* feature = "location qualifier on output";
- if (profile == EEsProfile && version < 310)
+ if (isEsProfile() && version < 310)
requireStage(loc, EShLangFragment, feature);
else
requireStage(loc, (EShLanguageMask)~EShLangComputeMask, feature);
@@ -5827,12 +5906,14 @@ void TParseContext::layoutQualifierCheck(const TSourceLoc& loc, const TQualifier
}
break;
}
+#endif
case EvqUniform:
case EvqBuffer:
{
const char* feature = "location qualifier on uniform or buffer";
- requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile, feature);
- profileRequires(loc, ECoreProfile | ECompatibilityProfile, 430, nullptr, feature);
+ requireProfile(loc, EEsProfile | ECoreProfile | ECompatibilityProfile | ENoProfile, feature);
+ profileRequires(loc, ~EEsProfile, 330, E_GL_ARB_explicit_attrib_location, feature);
+ profileRequires(loc, ~EEsProfile, 430, E_GL_ARB_explicit_uniform_location, feature);
profileRequires(loc, EEsProfile, 310, nullptr, feature);
break;
}
@@ -5867,18 +5948,17 @@ void TParseContext::layoutQualifierCheck(const TSourceLoc& loc, const TQualifier
error(loc, "offset/align can only be used on a uniform or buffer", "layout", "");
}
}
- if (qualifier.layoutPushConstant) {
+ if (qualifier.isPushConstant()) {
if (qualifier.storage != EvqUniform)
error(loc, "can only be used with a uniform", "push_constant", "");
if (qualifier.hasSet())
error(loc, "cannot be used with push_constant", "set", "");
}
- if (qualifier.layoutBufferReference) {
+ if (qualifier.hasBufferReference()) {
if (qualifier.storage != EvqBuffer)
error(loc, "can only be used with buffer", "buffer_reference", "");
}
-#ifdef NV_EXTENSIONS
- if (qualifier.layoutShaderRecordNV) {
+ if (qualifier.isShaderRecordNV()) {
if (qualifier.storage != EvqBuffer)
error(loc, "can only be used with a buffer", "shaderRecordNV", "");
if (qualifier.hasBinding())
@@ -5890,12 +5970,12 @@ void TParseContext::layoutQualifierCheck(const TSourceLoc& loc, const TQualifier
if (qualifier.storage == EvqHitAttrNV && qualifier.hasLayout()) {
error(loc, "cannot apply layout qualifiers to hitAttributeNV variable", "hitAttributeNV", "");
}
-#endif
}
// For places that can't have shader-level layout qualifiers
void TParseContext::checkNoShaderLayouts(const TSourceLoc& loc, const TShaderQualifiers& shaderQualifiers)
{
+#ifndef GLSLANG_WEB
const char* message = "can only apply to a standalone qualifier";
if (shaderQualifiers.geometry != ElgNone)
@@ -5908,10 +5988,6 @@ void TParseContext::checkNoShaderLayouts(const TSourceLoc& loc, const TShaderQua
error(loc, message, "point_mode", "");
if (shaderQualifiers.invocations != TQualifier::layoutNotSet)
error(loc, message, "invocations", "");
- if (shaderQualifiers.earlyFragmentTests)
- error(loc, message, "early_fragment_tests", "");
- if (shaderQualifiers.postDepthCoverage)
- error(loc, message, "post_depth_coverage", "");
for (int i = 0; i < 3; ++i) {
if (shaderQualifiers.localSize[i] > 1)
error(loc, message, "local_size", "");
@@ -5919,36 +5995,38 @@ void TParseContext::checkNoShaderLayouts(const TSourceLoc& loc, const TShaderQua
error(loc, message, "local_size id", "");
}
if (shaderQualifiers.vertices != TQualifier::layoutNotSet) {
- if (language == EShLangGeometry
-#ifdef NV_EXTENSIONS
- || language == EShLangMeshNV
-#endif
- )
+ if (language == EShLangGeometry || language == EShLangMeshNV)
error(loc, message, "max_vertices", "");
else if (language == EShLangTessControl)
error(loc, message, "vertices", "");
else
assert(0);
}
-#ifdef NV_EXTENSIONS
+ if (shaderQualifiers.earlyFragmentTests)
+ error(loc, message, "early_fragment_tests", "");
+ if (shaderQualifiers.postDepthCoverage)
+ error(loc, message, "post_depth_coverage", "");
if (shaderQualifiers.primitives != TQualifier::layoutNotSet) {
if (language == EShLangMeshNV)
error(loc, message, "max_primitives", "");
else
assert(0);
}
-#endif
- if (shaderQualifiers.blendEquation)
+ if (shaderQualifiers.hasBlendEquation())
error(loc, message, "blend equation", "");
if (shaderQualifiers.numViews != TQualifier::layoutNotSet)
error(loc, message, "num_views", "");
+ if (shaderQualifiers.interlockOrdering != EioNone)
+ error(loc, message, TQualifier::getInterlockOrderingString(shaderQualifiers.interlockOrdering), "");
+#endif
}
// Correct and/or advance an object's offset layout qualifier.
void TParseContext::fixOffset(const TSourceLoc& loc, TSymbol& symbol)
{
const TQualifier& qualifier = symbol.getType().getQualifier();
- if (symbol.getType().getBasicType() == EbtAtomicUint) {
+#ifndef GLSLANG_WEB
+ if (symbol.getType().isAtomic()) {
if (qualifier.hasBinding() && (int)qualifier.layoutBinding < resources.maxAtomicCounterBindings) {
// Set the offset
@@ -5957,6 +6035,10 @@ void TParseContext::fixOffset(const TSourceLoc& loc, TSymbol& symbol)
offset = qualifier.layoutOffset;
else
offset = atomicUintOffsets[qualifier.layoutBinding];
+
+ if (offset % 4 != 0)
+ error(loc, "atomic counters offset should align based on 4:", "offset", "%d", offset);
+
symbol.getWritableType().getQualifier().layoutOffset = offset;
// Check for overlap
@@ -5977,6 +6059,7 @@ void TParseContext::fixOffset(const TSourceLoc& loc, TSymbol& symbol)
atomicUintOffsets[qualifier.layoutBinding] = offset + numOffsets;
}
}
+#endif
}
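
A sketch of the layout fixOffset() tracks: two counters packed into binding 0 at consecutive 4-byte offsets. An offset of, say, 6 would now trigger the new alignment error above.

    static const char* kAtomicSrc = R"(
    #version 450
    layout(binding = 0, offset = 0) uniform atomic_uint drawn;
    layout(binding = 0, offset = 4) uniform atomic_uint culled;
    void main() { atomicCounterIncrement(drawn); }
    )";

    int main() { (void)kAtomicSrc; return 0; }
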
//
@@ -5986,13 +6069,16 @@ void TParseContext::fixOffset(const TSourceLoc& loc, TSymbol& symbol)
//
const TFunction* TParseContext::findFunction(const TSourceLoc& loc, const TFunction& call, bool& builtIn)
{
- const TFunction* function = nullptr;
-
if (symbolTable.isFunctionNameVariable(call.getName())) {
error(loc, "can't use function syntax on variable", call.getName().c_str(), "");
return nullptr;
}
+#ifdef GLSLANG_WEB
+ return findFunctionExact(loc, call, builtIn);
+#endif
+
+ const TFunction* function = nullptr;
bool explicitTypesEnabled = extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int8) ||
extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int16) ||
@@ -6002,10 +6088,10 @@ const TFunction* TParseContext::findFunction(const TSourceLoc& loc, const TFunct
extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float32) ||
extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float64);
- if (profile == EEsProfile || version < 120)
+ if (isEsProfile() || version < 120)
function = findFunctionExact(loc, call, builtIn);
else if (version < 400)
- function = findFunction120(loc, call, builtIn);
+ function = extensionTurnedOn(E_GL_ARB_gpu_shader_fp64) ? findFunction400(loc, call, builtIn) : findFunction120(loc, call, builtIn);
else if (explicitTypesEnabled)
function = findFunctionExplicitTypes(loc, call, builtIn);
else
@@ -6164,6 +6250,8 @@ const TFunction* TParseContext::findFunction400(const TSourceLoc& loc, const TFu
}
if (from.isArray() || to.isArray() || ! from.sameElementShape(to))
return false;
+ if (from.isCoopMat() && to.isCoopMat())
+ return from.sameCoopMatBaseType(to);
return intermediate.canImplicitlyPromote(from.getBasicType(), to.getBasicType());
};
@@ -6238,6 +6326,8 @@ const TFunction* TParseContext::findFunctionExplicitTypes(const TSourceLoc& loc,
}
if (from.isArray() || to.isArray() || ! from.sameElementShape(to))
return false;
+ if (from.isCoopMat() && to.isCoopMat())
+ return from.sameCoopMatBaseType(to);
return intermediate.canImplicitlyPromote(from.getBasicType(), to.getBasicType());
};
@@ -6289,21 +6379,26 @@ const TFunction* TParseContext::findFunctionExplicitTypes(const TSourceLoc& loc,
return bestMatch;
}
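
The updated dispatch in findFunction() reduces to a version/extension decision tree; a hedged summary in plain C++ (names simplified, not glslang's API):

    #include <cassert>

    enum class Rules { Exact, Glsl120, Glsl400, ExplicitTypes };

    // After this change, GL_ARB_gpu_shader_fp64 promotes pre-400 desktop
    // shaders to the 400-style ranking of implicit conversions.
    Rules pickRules(bool es, int version, bool fp64Ext, bool explicitTypesExt) {
        if (es || version < 120) return Rules::Exact;
        if (version < 400) return fp64Ext ? Rules::Glsl400 : Rules::Glsl120;
        return explicitTypesExt ? Rules::ExplicitTypes : Rules::Glsl400;
    }

    int main() {
        assert(pickRules(false, 330, true,  false) == Rules::Glsl400);
        assert(pickRules(false, 330, false, false) == Rules::Glsl120);
        return 0;
    }
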
-// When a declaration includes a type, but not a variable name, it can be
+// When a declaration includes a type, but not a variable name, it can be used
// to establish defaults.
void TParseContext::declareTypeDefaults(const TSourceLoc& loc, const TPublicType& publicType)
{
- if (publicType.basicType == EbtAtomicUint && publicType.qualifier.hasBinding() && publicType.qualifier.hasOffset()) {
+#ifndef GLSLANG_WEB
+ if (publicType.basicType == EbtAtomicUint && publicType.qualifier.hasBinding()) {
if (publicType.qualifier.layoutBinding >= (unsigned int)resources.maxAtomicCounterBindings) {
error(loc, "atomic_uint binding is too large", "binding", "");
return;
}
- atomicUintOffsets[publicType.qualifier.layoutBinding] = publicType.qualifier.layoutOffset;
+
+ if (publicType.qualifier.hasOffset()) {
+ atomicUintOffsets[publicType.qualifier.layoutBinding] = publicType.qualifier.layoutOffset;
+ }
return;
}
- if (publicType.qualifier.hasLayout() && !publicType.qualifier.layoutBufferReference)
+ if (publicType.qualifier.hasLayout() && !publicType.qualifier.hasBufferReference())
warn(loc, "useless application of layout qualifier", "layout", "");
+#endif
}
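
A sketch of the type-only declaration this function now handles even without an offset: the first line establishes the default offset for binding 1, which the named counter then inherits.

    static const char* kDefaultsSrc = R"(
    #version 450
    layout(binding = 1, offset = 8) uniform atomic_uint;
    layout(binding = 1) uniform atomic_uint c; // inherits offset 8
    void main() { }
    )";

    int main() { (void)kDefaultsSrc; return 0; }
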
//
@@ -6334,12 +6429,20 @@ TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& iden
if (!publicType.typeParameters || publicType.typeParameters->getNumDims() != 4) {
error(loc, "expected four type parameters", identifier.c_str(), "");
}
- if (publicType.typeParameters &&
- publicType.typeParameters->getDimSize(0) != 16 &&
- publicType.typeParameters->getDimSize(0) != 32 &&
- publicType.typeParameters->getDimSize(0) != 64) {
- error(loc, "expected 16, 32, or 64 bits for first type parameter", identifier.c_str(), "");
+ if (publicType.typeParameters) {
+ if (isTypeFloat(publicType.basicType) &&
+ publicType.typeParameters->getDimSize(0) != 16 &&
+ publicType.typeParameters->getDimSize(0) != 32 &&
+ publicType.typeParameters->getDimSize(0) != 64) {
+ error(loc, "expected 16, 32, or 64 bits for first type parameter", identifier.c_str(), "");
+ }
+ if (isTypeInt(publicType.basicType) &&
+ publicType.typeParameters->getDimSize(0) != 8 &&
+ publicType.typeParameters->getDimSize(0) != 32) {
+ error(loc, "expected 8 or 32 bits for first type parameter", identifier.c_str(), "");
+ }
}
+
} else {
if (publicType.typeParameters && publicType.typeParameters->getNumDims() != 0) {
error(loc, "unexpected type parameters", identifier.c_str(), "");
@@ -6355,17 +6458,18 @@ TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& iden
nonInitConstCheck(loc, identifier, type);
samplerCheck(loc, type, identifier, initializer);
- atomicUintCheck(loc, type, identifier);
transparentOpaqueCheck(loc, type, identifier);
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
+ atomicUintCheck(loc, type, identifier);
accStructNVCheck(loc, type, identifier);
+ checkAndResizeMeshViewDim(loc, type, /*isBlockMember*/ false);
#endif
- if (type.getQualifier().storage == EvqConst && type.containsBasicType(EbtReference)) {
+ if (type.getQualifier().storage == EvqConst && type.containsReference()) {
error(loc, "variables with reference type can't have qualifier 'const'", "qualifier", "");
}
if (type.getQualifier().storage != EvqUniform && type.getQualifier().storage != EvqBuffer) {
- if (type.containsBasicType(EbtFloat16))
+ if (type.contains16BitFloat())
requireFloat16Arithmetic(loc, "qualifier", "float16 types can only be in uniform block or buffer storage");
if (type.contains16BitInt())
requireInt16Arithmetic(loc, "qualifier", "(u)int16 types can only be in uniform block or buffer storage");
@@ -6373,13 +6477,12 @@ TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& iden
requireInt8Arithmetic(loc, "qualifier", "(u)int8 types can only be in uniform block or buffer storage");
}
- if (type.getQualifier().storage == EvqShared &&
- type.containsCoopMat())
+ if (type.getQualifier().storage == EvqShared && type.containsCoopMat())
error(loc, "qualifier", "Cooperative matrix types must not be used in shared memory", "");
if (identifier != "gl_FragCoord" && (publicType.shaderQualifiers.originUpperLeft || publicType.shaderQualifiers.pixelCenterInteger))
error(loc, "can only apply origin_upper_left and pixel_center_origin to gl_FragCoord", "layout qualifier", "");
- if (identifier != "gl_FragDepth" && publicType.shaderQualifiers.layoutDepth != EldNone)
+ if (identifier != "gl_FragDepth" && publicType.shaderQualifiers.getDepth() != EldNone)
error(loc, "can only apply depth layout to gl_FragDepth", "layout qualifier", "");
// Check for redeclaration of built-ins and/or attempting to declare a reserved name
@@ -6435,12 +6538,14 @@ TIntermNode* TParseContext::declareVariable(const TSourceLoc& loc, TString& iden
// Pick up the applicable global defaults and copy them into dst.
void TParseContext::inheritGlobalDefaults(TQualifier& dst) const
{
+#ifndef GLSLANG_WEB
if (dst.storage == EvqVaryingOut) {
if (! dst.hasStream() && language == EShLangGeometry)
dst.layoutStream = globalOutputDefaults.layoutStream;
if (! dst.hasXfbBuffer())
dst.layoutXfbBuffer = globalOutputDefaults.layoutXfbBuffer;
}
+#endif
}
//
@@ -6469,7 +6574,9 @@ TVariable* TParseContext::declareNonArray(const TSourceLoc& loc, const TString&
// make a new variable
TVariable* variable = new TVariable(&identifier, type);
+#ifndef GLSLANG_WEB
ioArrayCheck(loc, type, identifier);
+#endif
// add variable to symbol table
if (symbolTable.insert(*variable)) {
@@ -6496,7 +6603,7 @@ TIntermNode* TParseContext::executeInitializer(const TSourceLoc& loc, TIntermTyp
//
TStorageQualifier qualifier = variable->getType().getQualifier().storage;
if (! (qualifier == EvqTemporary || qualifier == EvqGlobal || qualifier == EvqConst ||
- (qualifier == EvqUniform && profile != EEsProfile && version >= 120))) {
+ (qualifier == EvqUniform && !isEsProfile() && version >= 120))) {
error(loc, " cannot initialize this type of qualifier ", variable->getType().getStorageQualifierString(), "");
return nullptr;
}
@@ -6514,7 +6621,9 @@ TIntermNode* TParseContext::executeInitializer(const TSourceLoc& loc, TIntermTyp
TType skeletalType;
skeletalType.shallowCopy(variable->getType());
skeletalType.getQualifier().makeTemporary();
+#ifndef GLSLANG_WEB
initializer = convertInitializerList(loc, skeletalType, initializer);
+#endif
if (! initializer) {
// error recovery; don't leave const without constant values
if (qualifier == EvqConst)
@@ -6568,7 +6677,7 @@ TIntermNode* TParseContext::executeInitializer(const TSourceLoc& loc, TIntermTyp
// qualifier any initializer must be a constant expression."
if (symbolTable.atGlobalLevel() && ! initializer->getType().getQualifier().isConstant()) {
const char* initFeature = "non-constant global initializer (needs GL_EXT_shader_non_constant_global_initializers)";
- if (profile == EEsProfile) {
+ if (isEsProfile()) {
if (relaxedErrors() && ! extensionTurnedOn(E_GL_EXT_shader_non_constant_global_initializers))
warn(loc, "not allowed in this version", initFeature, "");
else
@@ -6819,7 +6928,7 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
// This avoids requesting a matrix of a new type that is going to be discarded anyway.
// TODO: This could be generalized to more type combinations, but that would require
// more extensive testing and full algorithm rework. For now, the need to do two changes makes
- // the recursive call work, and avoids the most aggregious case of creating integer matrices.
+ // the recursive call work, and avoids the most egregious case of creating integer matrices.
if (node->getType().isMatrix() && (type.isScalar() || type.isVector()) &&
type.isFloatingDomain() != node->getType().isFloatingDomain()) {
TType transitionType(node->getBasicType(), glslang::EvqTemporary, type.getVectorSize(), 0, 0, node->isVector());
@@ -6850,6 +6959,35 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
basicOp = EOpConstructFloat;
break;
+ case EOpConstructIVec2:
+ case EOpConstructIVec3:
+ case EOpConstructIVec4:
+ case EOpConstructInt:
+ basicOp = EOpConstructInt;
+ break;
+
+ case EOpConstructUVec2:
+ if (node->getType().getBasicType() == EbtReference) {
+ requireExtensions(loc, 1, &E_GL_EXT_buffer_reference_uvec2, "reference conversion to uvec2");
+ TIntermTyped* newNode = intermediate.addBuiltInFunctionCall(node->getLoc(), EOpConvPtrToUvec2, true, node,
+ type);
+ return newNode;
+ }
+ case EOpConstructUVec3:
+ case EOpConstructUVec4:
+ case EOpConstructUint:
+ basicOp = EOpConstructUint;
+ break;
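
A sketch of the GLSL these new paths serve: round-tripping a buffer reference through a uvec2 exercises EOpConvPtrToUvec2 here and EOpConvUvec2ToPtr in the EOpConstructReference case further down (block name invented).

    static const char* kRefUvec2Src = R"(
    #version 450
    #extension GL_EXT_buffer_reference : require
    #extension GL_EXT_buffer_reference_uvec2 : require
    layout(buffer_reference) buffer Blob { uint word; };
    void main() {
        Blob b = Blob(uvec2(0)); // uvec2 -> reference
        uvec2 raw = uvec2(b);    // reference -> uvec2
    }
    )";

    int main() { (void)kRefUvec2Src; return 0; }
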
+
+ case EOpConstructBVec2:
+ case EOpConstructBVec3:
+ case EOpConstructBVec4:
+ case EOpConstructBool:
+ basicOp = EOpConstructBool;
+ break;
+
+#ifndef GLSLANG_WEB
+
case EOpConstructDVec2:
case EOpConstructDVec3:
case EOpConstructDVec4:
@@ -6880,6 +7018,22 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
case EOpConstructF16Mat4x4:
case EOpConstructFloat16:
basicOp = EOpConstructFloat16;
+ // 8/16-bit storage extensions don't support constructing composites of 8/16-bit types,
+ // so construct a 32-bit type and convert
+ if (!intermediate.getArithemeticFloat16Enabled()) {
+ TType tempType(EbtFloat, EvqTemporary, type.getVectorSize());
+ newNode = node;
+ if (tempType != newNode->getType()) {
+ TOperator aggregateOp;
+ if (op == EOpConstructFloat16)
+ aggregateOp = EOpConstructFloat;
+ else
+ aggregateOp = (TOperator)(EOpConstructVec2 + op - EOpConstructF16Vec2);
+ newNode = intermediate.setAggregateOperator(newNode, aggregateOp, tempType, node->getLoc());
+ }
+ newNode = intermediate.addConversion(EbtFloat16, newNode);
+ return newNode;
+ }
break;
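
The aggregateOp computation above leans on parallel enum ordering: EOpConstructF16Vec2..4 and EOpConstructVec2..4 are laid out in the same order, so a constant offset maps between them. A self-contained sketch of that idiom with toy enums, not glslang's:

    #include <cassert>

    // Toy enums reproducing the parallel layout the real code relies on.
    enum Op {
        OpConstructVec2, OpConstructVec3, OpConstructVec4,
        OpConstructF16Vec2, OpConstructF16Vec3, OpConstructF16Vec4,
    };

    // Map a float16 constructor to the same-shaped float constructor.
    Op widenToFloat32(Op op) {
        return static_cast<Op>(OpConstructVec2 + (op - OpConstructF16Vec2));
    }

    int main() {
        assert(widenToFloat32(OpConstructF16Vec3) == OpConstructVec3);
        return 0;
    }
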
case EOpConstructI8Vec2:
@@ -6887,6 +7041,22 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
case EOpConstructI8Vec4:
case EOpConstructInt8:
basicOp = EOpConstructInt8;
+ // 8/16-bit storage extensions don't support constructing composites of 8/16-bit types,
+ // so construct a 32-bit type and convert
+ if (!intermediate.getArithemeticInt8Enabled()) {
+ TType tempType(EbtInt, EvqTemporary, type.getVectorSize());
+ newNode = node;
+ if (tempType != newNode->getType()) {
+ TOperator aggregateOp;
+ if (op == EOpConstructInt8)
+ aggregateOp = EOpConstructInt;
+ else
+ aggregateOp = (TOperator)(EOpConstructIVec2 + op - EOpConstructI8Vec2);
+ newNode = intermediate.setAggregateOperator(newNode, aggregateOp, tempType, node->getLoc());
+ }
+ newNode = intermediate.addConversion(EbtInt8, newNode);
+ return newNode;
+ }
break;
case EOpConstructU8Vec2:
@@ -6894,6 +7064,22 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
case EOpConstructU8Vec4:
case EOpConstructUint8:
basicOp = EOpConstructUint8;
+ // 8/16-bit storage extensions don't support constructing composites of 8/16-bit types,
+ // so construct a 32-bit type and convert
+ if (!intermediate.getArithemeticInt8Enabled()) {
+ TType tempType(EbtUint, EvqTemporary, type.getVectorSize());
+ newNode = node;
+ if (tempType != newNode->getType()) {
+ TOperator aggregateOp;
+ if (op == EOpConstructUint8)
+ aggregateOp = EOpConstructUint;
+ else
+ aggregateOp = (TOperator)(EOpConstructUVec2 + op - EOpConstructU8Vec2);
+ newNode = intermediate.setAggregateOperator(newNode, aggregateOp, tempType, node->getLoc());
+ }
+ newNode = intermediate.addConversion(EbtUint8, newNode);
+ return newNode;
+ }
break;
case EOpConstructI16Vec2:
@@ -6901,6 +7087,22 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
case EOpConstructI16Vec4:
case EOpConstructInt16:
basicOp = EOpConstructInt16;
+ // 8/16-bit storage extensions don't support constructing composites of 8/16-bit types,
+ // so construct a 32-bit type and convert
+ if (!intermediate.getArithemeticInt16Enabled()) {
+ TType tempType(EbtInt, EvqTemporary, type.getVectorSize());
+ newNode = node;
+ if (tempType != newNode->getType()) {
+ TOperator aggregateOp;
+ if (op == EOpConstructInt16)
+ aggregateOp = EOpConstructInt;
+ else
+ aggregateOp = (TOperator)(EOpConstructIVec2 + op - EOpConstructI16Vec2);
+ newNode = intermediate.setAggregateOperator(newNode, aggregateOp, tempType, node->getLoc());
+ }
+ newNode = intermediate.addConversion(EbtInt16, newNode);
+ return newNode;
+ }
break;
case EOpConstructU16Vec2:
@@ -6908,20 +7110,22 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
case EOpConstructU16Vec4:
case EOpConstructUint16:
basicOp = EOpConstructUint16;
- break;
-
- case EOpConstructIVec2:
- case EOpConstructIVec3:
- case EOpConstructIVec4:
- case EOpConstructInt:
- basicOp = EOpConstructInt;
- break;
-
- case EOpConstructUVec2:
- case EOpConstructUVec3:
- case EOpConstructUVec4:
- case EOpConstructUint:
- basicOp = EOpConstructUint;
+ // 8/16-bit storage extensions don't support constructing composites of 8/16-bit types,
+ // so construct a 32-bit type and convert
+ if (!intermediate.getArithemeticInt16Enabled()) {
+ TType tempType(EbtUint, EvqTemporary, type.getVectorSize());
+ newNode = node;
+ if (tempType != newNode->getType()) {
+ TOperator aggregateOp;
+ if (op == EOpConstructUint16)
+ aggregateOp = EOpConstructUint;
+ else
+ aggregateOp = (TOperator)(EOpConstructUVec2 + op - EOpConstructU16Vec2);
+ newNode = intermediate.setAggregateOperator(newNode, aggregateOp, tempType, node->getLoc());
+ }
+ newNode = intermediate.addConversion(EbtUint16, newNode);
+ return newNode;
+ }
break;
case EOpConstructI64Vec2:
@@ -6932,7 +7136,7 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
break;
case EOpConstructUint64:
- if (type.isScalar() && node->getType().getBasicType() == EbtReference) {
+ if (type.isScalar() && node->getType().isReference()) {
TIntermTyped* newNode = intermediate.addBuiltInFunctionCall(node->getLoc(), EOpConvPtrToUint64, true, node, type);
return newNode;
}
@@ -6943,27 +7147,27 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
basicOp = EOpConstructUint64;
break;
- case EOpConstructBVec2:
- case EOpConstructBVec3:
- case EOpConstructBVec4:
- case EOpConstructBool:
- basicOp = EOpConstructBool;
- break;
-
case EOpConstructNonuniform:
// Make a nonuniform copy of node
- newNode = intermediate.addBuiltInFunctionCall(node->getLoc(), EOpCopyObject, true, node, node->getType());
- newNode->getWritableType().getQualifier().nonUniform = true;
+ newNode = intermediate.addBuiltInFunctionCall(node->getLoc(), EOpCopyObject, true, node, type);
return newNode;
case EOpConstructReference:
// construct reference from reference
- if (node->getType().getBasicType() == EbtReference) {
+ if (node->getType().isReference()) {
newNode = intermediate.addBuiltInFunctionCall(node->getLoc(), EOpConstructReference, true, node, type);
return newNode;
// construct reference from uint64
} else if (node->getType().isScalar() && node->getType().getBasicType() == EbtUint64) {
- TIntermTyped* newNode = intermediate.addBuiltInFunctionCall(node->getLoc(), EOpConvUint64ToPtr, true, node, type);
+ TIntermTyped* newNode = intermediate.addBuiltInFunctionCall(node->getLoc(), EOpConvUint64ToPtr, true, node,
+ type);
+ return newNode;
+ // construct reference from uvec2
+ } else if (node->getType().isVector() && node->getType().getBasicType() == EbtUint &&
+ node->getVectorSize() == 2) {
+ requireExtensions(loc, 1, &E_GL_EXT_buffer_reference_uvec2, "uvec2 conversion to reference");
+ TIntermTyped* newNode = intermediate.addBuiltInFunctionCall(node->getLoc(), EOpConvUvec2ToPtr, true, node,
+ type);
return newNode;
} else {
return nullptr;
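
[editor note] The three accepted construction sources handled above can be exercised from GLSL roughly as follows. This is a hedged sketch: block and variable names are invented, and the int64 extension line is an assumption made for the uint64_t cast.

    // GLSL snippet, embedded as a C++ string literal for illustration only.
    const char* kBufferReferenceSnippet = R"(
    #version 450
    #extension GL_EXT_buffer_reference : enable
    #extension GL_EXT_buffer_reference_uvec2 : enable
    #extension GL_ARB_gpu_shader_int64 : enable
    layout(buffer_reference) buffer Node { uint payload; };
    void main() {
        Node a = Node(uint64_t(0));  // reference from uint64 (EOpConvUint64ToPtr)
        Node b = Node(a);            // reference from reference
        Node c = Node(uvec2(0, 0));  // reference from uvec2, gated on the uvec2 extension
    }
    )";
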
@@ -6976,19 +7180,75 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
}
node = intermediate.setAggregateOperator(node, EOpConstructCooperativeMatrix, type, node->getLoc());
} else {
+ TOperator op = EOpNull;
switch (type.getBasicType()) {
default:
assert(0);
break;
+ case EbtInt:
+ switch (node->getType().getBasicType()) {
+ case EbtFloat: op = EOpConvFloatToInt; break;
+ case EbtFloat16: op = EOpConvFloat16ToInt; break;
+ case EbtUint8: op = EOpConvUint8ToInt; break;
+ case EbtInt8: op = EOpConvInt8ToInt; break;
+ case EbtUint: op = EOpConvUintToInt; break;
+ default: assert(0);
+ }
+ break;
+ case EbtUint:
+ switch (node->getType().getBasicType()) {
+ case EbtFloat: op = EOpConvFloatToUint; break;
+ case EbtFloat16: op = EOpConvFloat16ToUint; break;
+ case EbtUint8: op = EOpConvUint8ToUint; break;
+ case EbtInt8: op = EOpConvInt8ToUint; break;
+ case EbtInt: op = EOpConvIntToUint; break;
+ default: assert(0);
+ }
+ break;
+ case EbtInt8:
+ switch (node->getType().getBasicType()) {
+ case EbtFloat: op = EOpConvFloatToInt8; break;
+ case EbtFloat16: op = EOpConvFloat16ToInt8; break;
+ case EbtUint8: op = EOpConvUint8ToInt8; break;
+ case EbtInt: op = EOpConvIntToInt8; break;
+ case EbtUint: op = EOpConvUintToInt8; break;
+ default: assert(0);
+ }
+ break;
+ case EbtUint8:
+ switch (node->getType().getBasicType()) {
+ case EbtFloat: op = EOpConvFloatToUint8; break;
+ case EbtFloat16: op = EOpConvFloat16ToUint8; break;
+ case EbtInt8: op = EOpConvInt8ToUint8; break;
+ case EbtInt: op = EOpConvIntToUint8; break;
+ case EbtUint: op = EOpConvUintToUint8; break;
+ default: assert(0);
+ }
+ break;
case EbtFloat:
- assert(node->getType().getBasicType() == EbtFloat16);
- node = intermediate.addUnaryNode(EOpConvFloat16ToFloat, node, node->getLoc(), type);
+ switch (node->getType().getBasicType()) {
+ case EbtFloat16: op = EOpConvFloat16ToFloat; break;
+ case EbtInt8: op = EOpConvInt8ToFloat; break;
+ case EbtUint8: op = EOpConvUint8ToFloat; break;
+ case EbtInt: op = EOpConvIntToFloat; break;
+ case EbtUint: op = EOpConvUintToFloat; break;
+ default: assert(0);
+ }
break;
case EbtFloat16:
- assert(node->getType().getBasicType() == EbtFloat);
- node = intermediate.addUnaryNode(EOpConvFloatToFloat16, node, node->getLoc(), type);
+ switch (node->getType().getBasicType()) {
+ case EbtFloat: op = EOpConvFloatToFloat16; break;
+ case EbtInt8: op = EOpConvInt8ToFloat16; break;
+ case EbtUint8: op = EOpConvUint8ToFloat16; break;
+ case EbtInt: op = EOpConvIntToFloat16; break;
+ case EbtUint: op = EOpConvUintToFloat16; break;
+ default: assert(0);
+ }
break;
}
+
+ node = intermediate.addUnaryNode(op, node, node->getLoc(), type);
// If it's a (non-specialization) constant, it must be folded.
if (node->getAsUnaryNode()->getOperand()->getAsConstantUnion())
return node->getAsUnaryNode()->getOperand()->getAsConstantUnion()->fold(op, node->getType());
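
[editor note] The pattern above: pick a unary conversion op from the (source, target) basic-type pair, wrap the operand, and fold immediately when the operand is a compile-time constant. A condensed sketch of that shape, with stand-in types rather than glslang's classes:

    #include <cstdint>
    #include <variant>

    // Stand-ins: a scalar constant and a target basic type.
    using Const = std::variant<float, int32_t, uint32_t>;
    enum class Bt { Float, Int, Uint };

    // Convert eagerly, mirroring "a (non-specialization) constant must be folded".
    Const foldConversion(Bt target, const Const& c) {
        return std::visit([target](auto v) -> Const {
            switch (target) {
            case Bt::Int:  return static_cast<int32_t>(v);
            case Bt::Uint: return static_cast<uint32_t>(v);
            default:       return static_cast<float>(v);
            }
        }, c);
    }
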
@@ -6996,6 +7256,8 @@ TIntermTyped* TParseContext::constructBuiltIn(const TType& type, TOperator op, T
return node;
+#endif // GLSLANG_WEB
+
default:
error(loc, "unsupported construction", "", "");
@@ -7037,6 +7299,23 @@ TIntermTyped* TParseContext::constructAggregate(TIntermNode* node, const TType&
return converted;
}
+// If a memory qualifier is present in 'from', also make it present in 'to'.
+void TParseContext::inheritMemoryQualifiers(const TQualifier& from, TQualifier& to)
+{
+#ifndef GLSLANG_WEB
+ if (from.isReadOnly())
+ to.readonly = from.readonly;
+ if (from.isWriteOnly())
+ to.writeonly = from.writeonly;
+ if (from.coherent)
+ to.coherent = from.coherent;
+ if (from.volatil)
+ to.volatil = from.volatil;
+ if (from.restrict)
+ to.restrict = from.restrict;
+#endif
+}
+
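
[editor note] Effect of the helper above, sketched as the GLSL it enables: block-level memory qualifiers now reach each member, so a member of a readonly coherent buffer block is itself treated as readonly and coherent (names below are invented):

    const char* kQualifierInheritanceSnippet = R"(
    #version 450
    layout(std430, binding = 0) readonly coherent buffer Buf {
        vec4 data[];  // inherits readonly and coherent from the block
    };
    )";
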
//
// Do everything needed to add an interface block.
//
@@ -7052,7 +7331,7 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
requireProfile(loc, ~EEsProfile, "array-of-array of block");
}
- // fix and check for member storage qualifiers and types that don't belong within a block
+    // Inherit and check member storage qualifiers with respect to the block-level qualifier.
for (unsigned int member = 0; member < typeList.size(); ++member) {
TType& memberType = *typeList[member].type;
TQualifier& memberQualifier = memberType.getQualifier();
@@ -7061,7 +7340,8 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
if (memberQualifier.storage != EvqTemporary && memberQualifier.storage != EvqGlobal && memberQualifier.storage != currentBlockQualifier.storage)
error(memberLoc, "member storage qualifier cannot contradict block storage qualifier", memberType.getFieldName().c_str(), "");
memberQualifier.storage = currentBlockQualifier.storage;
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
+ inheritMemoryQualifiers(currentBlockQualifier, memberQualifier);
if (currentBlockQualifier.perPrimitiveNV)
memberQualifier.perPrimitiveNV = currentBlockQualifier.perPrimitiveNV;
if (currentBlockQualifier.perViewNV)
@@ -7114,18 +7394,13 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
// Special case for "push_constant uniform", which has a default of std430,
// contrary to normal uniform defaults, and can't have a default tracked for it.
- if ((currentBlockQualifier.layoutPushConstant && !currentBlockQualifier.hasPacking())
-#ifdef NV_EXTENSIONS
- || (currentBlockQualifier.layoutShaderRecordNV && !currentBlockQualifier.hasPacking())
-#endif
- )
+ if ((currentBlockQualifier.isPushConstant() && !currentBlockQualifier.hasPacking()) ||
+ (currentBlockQualifier.isShaderRecordNV() && !currentBlockQualifier.hasPacking()))
currentBlockQualifier.layoutPacking = ElpStd430;
-#ifdef NV_EXTENSIONS
    // Special case for "taskNV in/out", which has a default of std430.
- if (currentBlockQualifier.perTaskNV && !currentBlockQualifier.hasPacking())
+ if (currentBlockQualifier.isTaskMemory() && !currentBlockQualifier.hasPacking())
currentBlockQualifier.layoutPacking = ElpStd430;
-#endif
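
[editor note] A hedged example of the defaulting just above: with no explicit packing qualifier, a push-constant block is laid out std430 rather than the std140 default that ordinary uniform blocks get:

    const char* kPushConstantSnippet = R"(
    #version 450
    layout(push_constant) uniform PC {  // implicitly std430
        mat4 mvp;
        vec4 tint;
    } pc;
    )";
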
// fix and check for member layout qualifiers
@@ -7143,12 +7418,11 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
bool memberWithLocation = false;
bool memberWithoutLocation = false;
-#ifdef NV_EXTENSIONS
bool memberWithPerViewQualifier = false;
-#endif
for (unsigned int member = 0; member < typeList.size(); ++member) {
TQualifier& memberQualifier = typeList[member].type->getQualifier();
const TSourceLoc& memberLoc = typeList[member].loc;
+#ifndef GLSLANG_WEB
if (memberQualifier.hasStream()) {
if (defaultQualification.layoutStream != memberQualifier.layoutStream)
error(memberLoc, "member cannot contradict block", "stream", "");
@@ -7162,12 +7436,14 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
if (defaultQualification.layoutXfbBuffer != memberQualifier.layoutXfbBuffer)
error(memberLoc, "member cannot contradict block (or what block inherited from global)", "xfb_buffer", "");
}
+#endif
if (memberQualifier.hasPacking())
error(memberLoc, "member of block cannot have a packing layout qualifier", typeList[member].type->getFieldName().c_str(), "");
if (memberQualifier.hasLocation()) {
const char* feature = "location on block member";
switch (currentBlockQualifier.storage) {
+#ifndef GLSLANG_WEB
case EvqVaryingIn:
case EvqVaryingOut:
requireProfile(memberLoc, ECoreProfile | ECompatibilityProfile | EEsProfile, feature);
@@ -7175,6 +7451,7 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
profileRequires(memberLoc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, feature);
memberWithLocation = true;
break;
+#endif
default:
error(memberLoc, "can only use in an in/out block", feature, "");
break;
@@ -7191,11 +7468,9 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
error(memberLoc, "can only be used with std140, std430, or scalar layout packing", "offset/align", "");
}
-#ifdef NV_EXTENSIONS
if (memberQualifier.isPerView()) {
memberWithPerViewQualifier = true;
}
-#endif
TQualifier newMemberQualification = defaultQualification;
mergeQualifiers(memberLoc, newMemberQualification, memberQualifier, false);
@@ -7204,6 +7479,7 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
layoutMemberLocationArrayCheck(loc, memberWithLocation, arraySizes);
+#ifndef GLSLANG_WEB
// Ensure that the block has an XfbBuffer assigned. This is needed
// because if the block has a XfbOffset assigned, then it is
// assumed that it has implicitly assigned the current global
@@ -7213,6 +7489,7 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
if (!currentBlockQualifier.hasXfbBuffer() && currentBlockQualifier.hasXfbOffset())
currentBlockQualifier.layoutXfbBuffer = globalOutputDefaults.layoutXfbBuffer;
}
+#endif
// Process the members
fixBlockLocations(loc, currentBlockQualifier, typeList, memberWithLocation, memberWithoutLocation);
@@ -7221,10 +7498,10 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
for (unsigned int member = 0; member < typeList.size(); ++member)
layoutTypeCheck(typeList[member].loc, *typeList[member].type);
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
if (memberWithPerViewQualifier) {
for (unsigned int member = 0; member < typeList.size(); ++member) {
- resizeMeshViewDimension(typeList[member].loc, *typeList[member].type);
+ checkAndResizeMeshViewDim(typeList[member].loc, *typeList[member].type, /*isBlockMember*/ true);
}
}
#endif
@@ -7240,10 +7517,11 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
TType blockType(&typeList, *blockName, currentBlockQualifier);
if (arraySizes != nullptr)
blockType.transferArraySizes(arraySizes);
- else
- ioArrayCheck(loc, blockType, instanceName ? *instanceName : *blockName);
- if (currentBlockQualifier.layoutBufferReference) {
+#ifndef GLSLANG_WEB
+ if (arraySizes == nullptr)
+ ioArrayCheck(loc, blockType, instanceName ? *instanceName : *blockName);
+ if (currentBlockQualifier.hasBufferReference()) {
if (currentBlockQualifier.storage != EvqBuffer)
error(loc, "can only be used with buffer", "buffer_reference", "");
@@ -7255,7 +7533,7 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
TVariable* blockNameVar = new TVariable(blockName, blockNameType, true);
if (! symbolTable.insert(*blockNameVar)) {
TSymbol* existingName = symbolTable.find(*blockName);
- if (existingName->getType().getBasicType() == EbtReference &&
+ if (existingName->getType().isReference() &&
existingName->getType().getReferentType()->getStruct() &&
existingName->getType().getReferentType()->getStruct()->size() == 0 &&
existingName->getType().getQualifier().storage == blockType.getQualifier().storage) {
@@ -7267,7 +7545,9 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
if (!instanceName) {
return;
}
- } else {
+ } else
+#endif
+ {
//
// Don't make a user-defined type out of block name; that will cause an error
// if the same block name gets reused in a different interface.
@@ -7315,12 +7595,14 @@ void TParseContext::declareBlock(const TSourceLoc& loc, TTypeList& typeList, con
// Check for general layout qualifier errors
layoutObjectCheck(loc, variable);
+#ifndef GLSLANG_WEB
// fix up
if (isIoResizeArray(blockType)) {
ioArraySymbolResizeList.push_back(&variable);
checkIoArraysConsistency(loc, true);
} else
fixIoArraySize(loc, variable.getWritableType());
+#endif
// Save it in the AST for linker use.
trackLinkage(variable);
@@ -7333,8 +7615,8 @@ void TParseContext::blockStageIoCheck(const TSourceLoc& loc, const TQualifier& q
switch (qualifier.storage) {
case EvqUniform:
profileRequires(loc, EEsProfile, 300, nullptr, "uniform block");
- profileRequires(loc, ENoProfile, 140, nullptr, "uniform block");
- if (currentBlockQualifier.layoutPacking == ElpStd430 && ! currentBlockQualifier.layoutPushConstant)
+ profileRequires(loc, ENoProfile, 140, E_GL_ARB_uniform_buffer_object, "uniform block");
+ if (currentBlockQualifier.layoutPacking == ElpStd430 && ! currentBlockQualifier.isPushConstant())
requireExtensions(loc, 1, &E_GL_EXT_scalar_block_layout, "std430 requires the buffer storage qualifier");
break;
case EvqBuffer:
@@ -7346,41 +7628,28 @@ void TParseContext::blockStageIoCheck(const TSourceLoc& loc, const TQualifier& q
profileRequires(loc, ~EEsProfile, 150, E_GL_ARB_separate_shader_objects, "input block");
// It is a compile-time error to have an input block in a vertex shader or an output block in a fragment shader
// "Compute shaders do not permit user-defined input variables..."
- requireStage(loc, (EShLanguageMask)(EShLangTessControlMask|EShLangTessEvaluationMask|EShLangGeometryMask|EShLangFragmentMask
-#ifdef NV_EXTENSIONS
- |EShLangMeshNVMask
-#endif
- ), "input block");
+ requireStage(loc, (EShLanguageMask)(EShLangTessControlMask|EShLangTessEvaluationMask|EShLangGeometryMask|
+ EShLangFragmentMask|EShLangMeshNVMask), "input block");
if (language == EShLangFragment) {
profileRequires(loc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, "fragment input block");
- }
-#ifdef NV_EXTENSIONS
- else if (language == EShLangMeshNV && ! qualifier.isTaskMemory()) {
+ } else if (language == EShLangMeshNV && ! qualifier.isTaskMemory()) {
error(loc, "input blocks cannot be used in a mesh shader", "out", "");
}
-#endif
break;
case EvqVaryingOut:
profileRequires(loc, ~EEsProfile, 150, E_GL_ARB_separate_shader_objects, "output block");
- requireStage(loc, (EShLanguageMask)(EShLangVertexMask|EShLangTessControlMask|EShLangTessEvaluationMask|EShLangGeometryMask
-#ifdef NV_EXTENSIONS
- |EShLangMeshNVMask|EShLangTaskNVMask
-#endif
- ), "output block");
+ requireStage(loc, (EShLanguageMask)(EShLangVertexMask|EShLangTessControlMask|EShLangTessEvaluationMask|
+ EShLangGeometryMask|EShLangMeshNVMask|EShLangTaskNVMask), "output block");
// ES 310 can have a block before shader_io is turned on, so skip this test for built-ins
if (language == EShLangVertex && ! parsingBuiltins) {
profileRequires(loc, EEsProfile, 320, Num_AEP_shader_io_blocks, AEP_shader_io_blocks, "vertex output block");
- }
-#ifdef NV_EXTENSIONS
- else if (language == EShLangMeshNV && qualifier.isTaskMemory()) {
+ } else if (language == EShLangMeshNV && qualifier.isTaskMemory()) {
error(loc, "can only use on input blocks in mesh shader", "taskNV", "");
- }
- else if (language == EShLangTaskNV && ! qualifier.isTaskMemory()) {
+ } else if (language == EShLangTaskNV && ! qualifier.isTaskMemory()) {
error(loc, "output blocks cannot be used in a task shader", "out", "");
}
-#endif
break;
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
case EvqPayloadNV:
profileRequires(loc, ~EEsProfile, 460, E_GL_NV_ray_tracing, "rayPayloadNV block");
requireStage(loc, (EShLanguageMask)(EShLangRayGenNVMask | EShLangAnyHitNVMask | EShLangClosestHitNVMask | EShLangMissNVMask),
@@ -7435,18 +7704,16 @@ void TParseContext::blockQualifierCheck(const TSourceLoc& loc, const TQualifier&
error(loc, "cannot use interpolation qualifiers on an interface block", "flat/smooth/noperspective", "");
if (qualifier.centroid)
error(loc, "cannot use centroid qualifier on an interface block", "centroid", "");
- if (qualifier.sample)
+ if (qualifier.isSample())
error(loc, "cannot use sample qualifier on an interface block", "sample", "");
if (qualifier.invariant)
error(loc, "cannot use invariant qualifier on an interface block", "invariant", "");
- if (qualifier.layoutPushConstant)
+ if (qualifier.isPushConstant())
intermediate.addPushConstantCount();
-#ifdef NV_EXTENSIONS
- if (qualifier.layoutShaderRecordNV)
+ if (qualifier.isShaderRecordNV())
intermediate.addShaderRecordNVCount();
- if (qualifier.perTaskNV)
+ if (qualifier.isTaskMemory())
intermediate.addTaskNVCount();
-#endif
}
//
@@ -7495,6 +7762,7 @@ void TParseContext::fixBlockLocations(const TSourceLoc& loc, TQualifier& qualifi
void TParseContext::fixXfbOffsets(TQualifier& qualifier, TTypeList& typeList)
{
+#ifndef GLSLANG_WEB
// "If a block is qualified with xfb_offset, all its
// members are assigned transform feedback buffer offsets. If a block is not qualified with xfb_offset, any
// members of that block not qualified with an xfb_offset will not be assigned transform feedback buffer
@@ -7507,24 +7775,18 @@ void TParseContext::fixXfbOffsets(TQualifier& qualifier, TTypeList& typeList)
for (unsigned int member = 0; member < typeList.size(); ++member) {
TQualifier& memberQualifier = typeList[member].type->getQualifier();
bool contains64BitType = false;
-#ifdef AMD_EXTENSIONS
bool contains32BitType = false;
bool contains16BitType = false;
int memberSize = intermediate.computeTypeXfbSize(*typeList[member].type, contains64BitType, contains32BitType, contains16BitType);
-#else
- int memberSize = intermediate.computeTypeXfbSize(*typeList[member].type, contains64BitType);
-#endif
// see if we need to auto-assign an offset to this member
if (! memberQualifier.hasXfbOffset()) {
// "if applied to an aggregate containing a double or 64-bit integer, the offset must also be a multiple of 8"
if (contains64BitType)
RoundToPow2(nextOffset, 8);
-#ifdef AMD_EXTENSIONS
else if (contains32BitType)
RoundToPow2(nextOffset, 4);
else if (contains16BitType)
RoundToPow2(nextOffset, 2);
-#endif
memberQualifier.layoutXfbOffset = nextOffset;
} else
nextOffset = memberQualifier.layoutXfbOffset;
@@ -7534,6 +7796,7 @@ void TParseContext::fixXfbOffsets(TQualifier& qualifier, TTypeList& typeList)
// The above gave all block members an offset, so we can take it off the block now,
// which will avoid double counting the offset usage.
qualifier.layoutXfbOffset = TQualifier::layoutXfbOffsetEnd;
+#endif
}
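
[editor note] The auto-assignment loop above advances a byte cursor, rounding it up to 8, 4, or 2 bytes when the member contains 64-, 32-, or 16-bit types. A self-contained sketch with RoundToPow2 written out (member sizes and contents are plain inputs here; glslang derives them from the type tree):

    #include <vector>

    static void RoundToPow2(int& value, int pow2) {
        value = (value + pow2 - 1) & ~(pow2 - 1);
    }

    struct Member { int size; bool has64, has32, has16; int xfbOffset = -1; };

    void assignXfbOffsets(std::vector<Member>& members, int nextOffset = 0) {
        for (Member& m : members) {
            if (m.xfbOffset < 0) {              // no explicit xfb_offset: auto-assign
                if (m.has64)      RoundToPow2(nextOffset, 8);
                else if (m.has32) RoundToPow2(nextOffset, 4);
                else if (m.has16) RoundToPow2(nextOffset, 2);
                m.xfbOffset = nextOffset;
            } else {
                nextOffset = m.xfbOffset;       // explicit offset resets the cursor
            }
            nextOffset += m.size;
        }
    }
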
// Calculate and save the offset of each block member, using the recursively
@@ -7610,7 +7873,7 @@ void TParseContext::addQualifierToExisting(const TSourceLoc& loc, TQualifier qua
// a qualifier to an existing symbol. Detect this and create the block reference
// type with an empty type list, which will be filled in later in
// TParseContext::declareBlock.
- if (!symbol && qualifier.layoutBufferReference) {
+ if (!symbol && qualifier.hasBufferReference()) {
TTypeList typeList;
        TType blockType(&typeList, identifier, qualifier);
TType blockNameType(EbtReference, blockType, identifier);
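
[editor note] This path exists so that a buffer_reference block can be forward-declared before its definition; a hedged GLSL sketch of the pattern (the self-referential list node is invented):

    const char* kForwardDeclSnippet = R"(
    #version 450
    #extension GL_EXT_buffer_reference : enable
    layout(buffer_reference) buffer Node;   // empty type list created here
    layout(buffer_reference) buffer Node {  // filled in later by declareBlock
        Node next;
        uint value;
    };
    )";
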
@@ -7650,10 +7913,10 @@ void TParseContext::addQualifierToExisting(const TSourceLoc& loc, TQualifier qua
error(loc, "cannot change qualification after use", "invariant", "");
symbol->getWritableType().getQualifier().invariant = true;
invariantCheck(loc, symbol->getType().getQualifier());
- } else if (qualifier.noContraction) {
+ } else if (qualifier.isNoContraction()) {
if (intermediate.inIoAccessed(identifier))
error(loc, "cannot change qualification after use", "precise", "");
- symbol->getWritableType().getQualifier().noContraction = true;
+ symbol->getWritableType().getQualifier().setNoContraction();
} else if (qualifier.specConstant) {
symbol->getWritableType().getQualifier().makeSpecConstant();
if (qualifier.hasSpecConstantId())
@@ -7676,7 +7939,7 @@ void TParseContext::invariantCheck(const TSourceLoc& loc, const TQualifier& qual
bool pipeOut = qualifier.isPipeOutput();
bool pipeIn = qualifier.isPipeInput();
- if (version >= 300 || (profile != EEsProfile && version >= 420)) {
+ if (version >= 300 || (!isEsProfile() && version >= 420)) {
if (! pipeOut)
error(loc, "can only apply to an output", "invariant", "");
} else {
@@ -7691,12 +7954,9 @@ void TParseContext::invariantCheck(const TSourceLoc& loc, const TQualifier& qual
//
void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, const TPublicType& publicType)
{
+#ifndef GLSLANG_WEB
if (publicType.shaderQualifiers.vertices != TQualifier::layoutNotSet) {
-#ifdef NV_EXTENSIONS
assert(language == EShLangTessControl || language == EShLangGeometry || language == EShLangMeshNV);
-#else
- assert(language == EShLangTessControl || language == EShLangGeometry);
-#endif
const char* id = (language == EShLangTessControl) ? "vertices" : "max_vertices";
if (publicType.qualifier.storage != EvqVaryingOut)
@@ -7707,7 +7967,6 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
if (language == EShLangTessControl)
checkIoArraysConsistency(loc);
}
-#ifdef NV_EXTENSIONS
if (publicType.shaderQualifiers.primitives != TQualifier::layoutNotSet) {
assert(language == EShLangMeshNV);
const char* id = "max_primitives";
@@ -7717,7 +7976,6 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
if (! intermediate.setPrimitives(publicType.shaderQualifiers.primitives))
error(loc, "cannot change previously set layout value", id, "");
}
-#endif
if (publicType.shaderQualifiers.invocations != TQualifier::layoutNotSet) {
if (publicType.qualifier.storage != EvqVaryingIn)
error(loc, "can only apply to 'in'", "invocations", "");
@@ -7734,12 +7992,10 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
case ElgTrianglesAdjacency:
case ElgQuads:
case ElgIsolines:
-#ifdef NV_EXTENSIONS
if (language == EShLangMeshNV) {
error(loc, "cannot apply to input", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), "");
break;
}
-#endif
if (intermediate.setInputPrimitive(publicType.shaderQualifiers.geometry)) {
if (language == EShLangGeometry)
checkIoArraysConsistency(loc);
@@ -7751,14 +8007,12 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
}
} else if (publicType.qualifier.storage == EvqVaryingOut) {
switch (publicType.shaderQualifiers.geometry) {
-#ifdef NV_EXTENSIONS
case ElgLines:
case ElgTriangles:
if (language != EShLangMeshNV) {
error(loc, "cannot apply to 'out'", TQualifier::getGeometryString(publicType.shaderQualifiers.geometry), "");
break;
}
-#endif
// Fall through
case ElgPoints:
case ElgLineStrip:
@@ -7792,8 +8046,9 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
else
error(loc, "can only apply to 'in'", "point_mode", "");
}
+#endif
for (int i = 0; i < 3; ++i) {
- if (publicType.shaderQualifiers.localSize[i] > 1) {
+ if (publicType.shaderQualifiers.localSizeNotDefault[i]) {
if (publicType.qualifier.storage == EvqVaryingIn) {
if (! intermediate.setLocalSize(i, publicType.shaderQualifiers.localSize[i]))
error(loc, "cannot change previously set size", "local_size", "");
@@ -7809,7 +8064,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
if (intermediate.getLocalSize(i) > (unsigned int)max)
error(loc, "too large; see gl_MaxComputeWorkGroupSize", "local_size", "");
}
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
else if (language == EShLangMeshNV) {
switch (i) {
case 0: max = resources.maxMeshWorkGroupSizeX_NV; break;
@@ -7819,8 +8074,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
}
if (intermediate.getLocalSize(i) > (unsigned int)max)
error(loc, "too large; see gl_MaxMeshWorkGroupSizeNV", "local_size", "");
- }
- else if (language == EShLangTaskNV) {
+ } else if (language == EShLangTaskNV) {
switch (i) {
case 0: max = resources.maxTaskWorkGroupSizeX_NV; break;
case 1: max = resources.maxTaskWorkGroupSizeY_NV; break;
@@ -7855,6 +8109,8 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
workGroupSize->getWritableType().getQualifier().specConstant = true;
}
}
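
[editor note] Usage sketch for the local_size validation above: compute-style stages declare the workgroup size on 'in', and each axis that was explicitly set is checked against the stage's maximum (gl_MaxComputeWorkGroupSize for compute, the NV mesh/task limits below otherwise):

    const char* kLocalSizeSnippet = R"(
    #version 450
    layout(local_size_x = 64, local_size_y = 1, local_size_z = 1) in;
    void main() { }
    )";
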
+
+#ifndef GLSLANG_WEB
if (publicType.shaderQualifiers.earlyFragmentTests) {
if (publicType.qualifier.storage == EvqVaryingIn)
intermediate.setEarlyFragmentTests();
@@ -7867,12 +8123,19 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
else
            error(loc, "can only apply to 'in'", "post_depth_coverage", "");
}
- if (publicType.shaderQualifiers.blendEquation) {
+ if (publicType.shaderQualifiers.hasBlendEquation()) {
if (publicType.qualifier.storage != EvqVaryingOut)
error(loc, "can only apply to 'out'", "blend equation", "");
}
+ if (publicType.shaderQualifiers.interlockOrdering) {
+ if (publicType.qualifier.storage == EvqVaryingIn) {
+ if (!intermediate.setInterlockOrdering(publicType.shaderQualifiers.interlockOrdering))
+ error(loc, "cannot change previously set fragment shader interlock ordering", TQualifier::getInterlockOrderingString(publicType.shaderQualifiers.interlockOrdering), "");
+ }
+ else
+ error(loc, "can only apply to 'in'", TQualifier::getInterlockOrderingString(publicType.shaderQualifiers.interlockOrdering), "");
+ }
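
[editor note] Hedged usage sketch for the interlock ordering handled above (GL_ARB_fragment_shader_interlock; the layout must appear on 'in', matching the error path):

    const char* kInterlockSnippet = R"(
    #version 450
    #extension GL_ARB_fragment_shader_interlock : enable
    layout(pixel_interlock_ordered) in;
    void main() {
        beginInvocationInterlockARB();
        // per-pixel critical section
        endInvocationInterlockARB();
    }
    )";
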
-#ifdef NV_EXTENSIONS
if (publicType.shaderQualifiers.layoutDerivativeGroupQuads &&
publicType.shaderQualifiers.layoutDerivativeGroupLinear) {
error(loc, "cannot be both specified", "derivative_group_quadsNV and derivative_group_linearNV", "");
@@ -7917,6 +8180,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
qualifier.isInterpolation() ||
qualifier.precision != EpqNone)
error(loc, "cannot use auxiliary, memory, interpolation, or precision qualifier in a default qualifier declaration (declaration with no type)", "qualifier", "");
+
// "The offset qualifier can only be used on block members of blocks..."
// "The align qualifier can only be used on blocks or block members..."
if (qualifier.hasOffset() ||
@@ -7941,6 +8205,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
case EvqVaryingIn:
break;
case EvqVaryingOut:
+#ifndef GLSLANG_WEB
if (qualifier.hasStream())
globalOutputDefaults.layoutStream = qualifier.layoutStream;
if (qualifier.hasXfbBuffer())
@@ -7949,6 +8214,7 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
if (! intermediate.setXfbBufferStride(globalOutputDefaults.layoutXfbBuffer, qualifier.layoutXfbStride))
error(loc, "all stride settings must match for xfb buffer", "xfb_stride", "%d", qualifier.layoutXfbBuffer);
}
+#endif
break;
default:
error(loc, "default qualifier requires 'uniform', 'buffer', 'in', or 'out' storage qualification", "", "");
@@ -7961,16 +8227,14 @@ void TParseContext::updateStandaloneQualifierDefaults(const TSourceLoc& loc, con
error(loc, "cannot declare a default, use a full declaration", "location/component/index", "");
if (qualifier.hasXfbOffset())
error(loc, "cannot declare a default, use a full declaration", "xfb_offset", "");
- if (qualifier.layoutPushConstant)
+ if (qualifier.isPushConstant())
error(loc, "cannot declare a default, can only be used on a block", "push_constant", "");
- if (qualifier.layoutBufferReference)
+ if (qualifier.hasBufferReference())
error(loc, "cannot declare a default, can only be used on a block", "buffer_reference", "");
if (qualifier.hasSpecConstantId())
error(loc, "cannot declare a default, can only be used on a scalar", "constant_id", "");
-#ifdef NV_EXTENSIONS
- if (qualifier.layoutShaderRecordNV)
+ if (qualifier.isShaderRecordNV())
error(loc, "cannot declare a default, can only be used on a block", "shaderRecordNV", "");
-#endif
}
//
@@ -8037,7 +8301,7 @@ TIntermNode* TParseContext::addSwitch(const TSourceLoc& loc, TIntermTyped* expre
// "it is an error to have no statement between a label and the end of the switch statement."
    // The specifications were updated to remove this (it was ill-defined what a "statement" was),
    // so this became a warning. However, 3.0 tests still check for the error.
- if (profile == EEsProfile && version <= 300 && ! relaxedErrors())
+ if (isEsProfile() && version <= 300 && ! relaxedErrors())
error(loc, "last case/default label not followed by statements", "switch", "");
else
warn(loc, "last case/default label not followed by statements", "switch", "");
diff --git a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h
index a1ffe64dbf..39363f1a2a 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/ParseHelper.h
@@ -85,6 +85,7 @@ public:
statementNestingLevel(0), loopNestingLevel(0), structNestingLevel(0), controlFlowNestingLevel(0),
postEntryPointReturn(false),
contextPragma(true, false),
+ beginInvocationInterlockCount(0), endInvocationInterlockCount(0),
parsingBuiltins(parsingBuiltins), scanContext(nullptr), ppContext(nullptr),
limits(resources.limits),
globalUniformBlock(nullptr),
@@ -96,6 +97,7 @@ public:
}
virtual ~TParseContextBase() { }
+#if !defined(GLSLANG_WEB) || defined(GLSLANG_WEB_DEVEL)
virtual void C_DECL error(const TSourceLoc&, const char* szReason, const char* szToken,
const char* szExtraInfoFormat, ...);
virtual void C_DECL warn(const TSourceLoc&, const char* szReason, const char* szToken,
@@ -104,6 +106,7 @@ public:
const char* szExtraInfoFormat, ...);
virtual void C_DECL ppWarn(const TSourceLoc&, const char* szReason, const char* szToken,
const char* szExtraInfoFormat, ...);
+#endif
virtual void setLimits(const TBuiltInResource&) = 0;
@@ -149,8 +152,10 @@ public:
extensionCallback(line, extension, behavior);
}
+#ifdef ENABLE_HLSL
// Manage the global uniform block (default uniforms in GLSL, $Global in HLSL)
virtual void growGlobalUniformBlock(const TSourceLoc&, TType&, const TString& memberName, TTypeList* typeList = nullptr);
+#endif
// Potentially rename shader entry point function
void renameShaderFunction(TString*& name) const
@@ -182,6 +187,8 @@ public:
// the statementNestingLevel the current switch statement is at, which must match the level of its case statements
TList<int> switchLevel;
struct TPragma contextPragma;
+ int beginInvocationInterlockCount;
+ int endInvocationInterlockCount;
protected:
TParseContextBase(TParseContextBase&);
@@ -276,7 +283,7 @@ public:
const TString* entryPoint = nullptr);
virtual ~TParseContext();
- bool obeyPrecisionQualifiers() const { return precisionManager.respectingPrecisionQualifiers(); };
+ bool obeyPrecisionQualifiers() const { return precisionManager.respectingPrecisionQualifiers(); }
void setPrecisionDefaults();
void setLimits(const TBuiltInResource&) override;
@@ -294,10 +301,12 @@ public:
TIntermTyped* handleBracketDereference(const TSourceLoc&, TIntermTyped* base, TIntermTyped* index);
void handleIndexLimits(const TSourceLoc&, TIntermTyped* base, TIntermTyped* index);
+#ifndef GLSLANG_WEB
void makeEditable(TSymbol*&) override;
+ void ioArrayCheck(const TSourceLoc&, const TType&, const TString& identifier);
+#endif
bool isIoResizeArray(const TType&) const;
void fixIoArraySize(const TSourceLoc&, TType&);
- void ioArrayCheck(const TSourceLoc&, const TType&, const TString& identifier);
void handleIoResizeArrayAccess(const TSourceLoc&, TIntermTyped* base);
void checkIoArraysConsistency(const TSourceLoc&, bool tailOnly = false);
int getIoArrayImplicitSize(const TQualifier&, TString* featureString = nullptr) const;
@@ -401,6 +410,7 @@ public:
TIntermTyped* addConstructor(const TSourceLoc&, TIntermNode*, const TType&);
TIntermTyped* constructAggregate(TIntermNode*, const TType&, int, const TSourceLoc&);
TIntermTyped* constructBuiltIn(const TType&, TOperator, TIntermTyped*, const TSourceLoc&, bool subset);
+ void inheritMemoryQualifiers(const TQualifier& from, TQualifier& to);
void declareBlock(const TSourceLoc&, TTypeList& typeList, const TString* instanceName = 0, TArraySizes* arraySizes = 0);
void blockStageIoCheck(const TSourceLoc&, const TQualifier&);
void blockQualifierCheck(const TSourceLoc&, const TQualifier&, bool instanceName);
@@ -414,6 +424,7 @@ public:
void wrapupSwitchSubsequence(TIntermAggregate* statements, TIntermNode* branchNode);
TIntermNode* addSwitch(const TSourceLoc&, TIntermTyped* expression, TIntermAggregate* body);
+#ifndef GLSLANG_WEB
TAttributeType attributeFromName(const TString& name) const;
TAttributes* makeAttributes(const TString& identifier) const;
TAttributes* makeAttributes(const TString& identifier, TIntermNode* node) const;
@@ -422,11 +433,11 @@ public:
// Determine selection control from attributes
void handleSelectionAttributes(const TAttributes& attributes, TIntermNode*);
void handleSwitchAttributes(const TAttributes& attributes, TIntermNode*);
-
// Determine loop control from attributes
void handleLoopAttributes(const TAttributes& attributes, TIntermNode*);
+#endif
- void resizeMeshViewDimension(const TSourceLoc&, TType&);
+ void checkAndResizeMeshViewDim(const TSourceLoc&, TType&, bool isBlockMember);
protected:
void nonInitConstCheck(const TSourceLoc&, TString& identifier, TType& type);
@@ -438,7 +449,9 @@ protected:
bool isRuntimeLength(const TIntermTyped&) const;
TIntermNode* executeInitializer(const TSourceLoc&, TIntermTyped* initializer, TVariable* variable);
TIntermTyped* convertInitializerList(const TSourceLoc&, const TType&, TIntermTyped* initializer);
+#ifndef GLSLANG_WEB
void finish() override;
+#endif
public:
//
@@ -464,10 +477,11 @@ protected:
TQualifier globalUniformDefaults;
TQualifier globalInputDefaults;
TQualifier globalOutputDefaults;
- int* atomicUintOffsets; // to become an array of the right size to hold an offset per binding point
TString currentCaller; // name of last function body entered (not valid when at global scope)
- TIdSetType inductiveLoopIds;
+#ifndef GLSLANG_WEB
+ int* atomicUintOffsets; // to become an array of the right size to hold an offset per binding point
bool anyIndexLimits;
+ TIdSetType inductiveLoopIds;
TVector<TIntermTyped*> needsIndexLimitationChecking;
//
@@ -503,6 +517,7 @@ protected:
// array-sizing declarations
//
TVector<TSymbol*> ioArraySymbolResizeList;
+#endif
};
} // end namespace glslang
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp b/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp
index 482f6ba271..fd18fd4d7d 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Scan.cpp
@@ -324,7 +324,9 @@ struct str_hash
// A single global usable by all threads, by all versions, by all languages.
// After a single process-level initialization, this is read only and thread safe
std::unordered_map<const char*, int, str_hash, str_eq>* KeywordMap = nullptr;
+#ifndef GLSLANG_WEB
std::unordered_set<const char*, str_hash, str_eq>* ReservedSet = nullptr;
+#endif
};
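
[editor note] Because the tables are keyed by raw const char*, they need content-based hash and equality functors rather than pointer identity. A minimal sketch of that scheme (the hash below is an illustrative FNV-1a; the real functor may differ):

    #include <cstring>
    #include <unordered_map>

    struct str_eq {
        bool operator()(const char* a, const char* b) const { return std::strcmp(a, b) == 0; }
    };
    struct str_hash {
        size_t operator()(const char* s) const {
            size_t h = 14695981039346656037ull;  // FNV-1a, for illustration
            while (*s) { h = (h ^ (unsigned char)*s++) * 1099511628211ull; }
            return h;
        }
    };

    std::unordered_map<const char*, int, str_hash, str_eq> keywords{{"const", 1}, {"uniform", 2}};
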
@@ -341,9 +343,15 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["const"] = CONST;
(*KeywordMap)["uniform"] = UNIFORM;
- (*KeywordMap)["nonuniformEXT"] = NONUNIFORM;
+ (*KeywordMap)["buffer"] = BUFFER;
(*KeywordMap)["in"] = IN;
(*KeywordMap)["out"] = OUT;
+ (*KeywordMap)["smooth"] = SMOOTH;
+ (*KeywordMap)["flat"] = FLAT;
+ (*KeywordMap)["centroid"] = CENTROID;
+ (*KeywordMap)["invariant"] = INVARIANT;
+ (*KeywordMap)["packed"] = PACKED;
+ (*KeywordMap)["resource"] = RESOURCE;
(*KeywordMap)["inout"] = INOUT;
(*KeywordMap)["struct"] = STRUCT;
(*KeywordMap)["break"] = BREAK;
@@ -376,9 +384,33 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["mat4"] = MAT4;
(*KeywordMap)["true"] = BOOLCONSTANT;
(*KeywordMap)["false"] = BOOLCONSTANT;
+ (*KeywordMap)["layout"] = LAYOUT;
+ (*KeywordMap)["shared"] = SHARED;
+ (*KeywordMap)["highp"] = HIGH_PRECISION;
+ (*KeywordMap)["mediump"] = MEDIUM_PRECISION;
+ (*KeywordMap)["lowp"] = LOW_PRECISION;
+ (*KeywordMap)["superp"] = SUPERP;
+ (*KeywordMap)["precision"] = PRECISION;
+ (*KeywordMap)["mat2x2"] = MAT2X2;
+ (*KeywordMap)["mat2x3"] = MAT2X3;
+ (*KeywordMap)["mat2x4"] = MAT2X4;
+ (*KeywordMap)["mat3x2"] = MAT3X2;
+ (*KeywordMap)["mat3x3"] = MAT3X3;
+ (*KeywordMap)["mat3x4"] = MAT3X4;
+ (*KeywordMap)["mat4x2"] = MAT4X2;
+ (*KeywordMap)["mat4x3"] = MAT4X3;
+ (*KeywordMap)["mat4x4"] = MAT4X4;
+ (*KeywordMap)["uint"] = UINT;
+ (*KeywordMap)["uvec2"] = UVEC2;
+ (*KeywordMap)["uvec3"] = UVEC3;
+ (*KeywordMap)["uvec4"] = UVEC4;
+
+#ifndef GLSLANG_WEB
+ (*KeywordMap)["nonuniformEXT"] = NONUNIFORM;
+ (*KeywordMap)["demote"] = DEMOTE;
(*KeywordMap)["attribute"] = ATTRIBUTE;
(*KeywordMap)["varying"] = VARYING;
- (*KeywordMap)["buffer"] = BUFFER;
+ (*KeywordMap)["noperspective"] = NOPERSPECTIVE;
(*KeywordMap)["coherent"] = COHERENT;
(*KeywordMap)["devicecoherent"] = DEVICECOHERENT;
(*KeywordMap)["queuefamilycoherent"] = QUEUEFAMILYCOHERENT;
@@ -390,24 +422,9 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["writeonly"] = WRITEONLY;
(*KeywordMap)["atomic_uint"] = ATOMIC_UINT;
(*KeywordMap)["volatile"] = VOLATILE;
- (*KeywordMap)["layout"] = LAYOUT;
- (*KeywordMap)["shared"] = SHARED;
(*KeywordMap)["patch"] = PATCH;
(*KeywordMap)["sample"] = SAMPLE;
(*KeywordMap)["subroutine"] = SUBROUTINE;
- (*KeywordMap)["highp"] = HIGH_PRECISION;
- (*KeywordMap)["mediump"] = MEDIUM_PRECISION;
- (*KeywordMap)["lowp"] = LOW_PRECISION;
- (*KeywordMap)["precision"] = PRECISION;
- (*KeywordMap)["mat2x2"] = MAT2X2;
- (*KeywordMap)["mat2x3"] = MAT2X3;
- (*KeywordMap)["mat2x4"] = MAT2X4;
- (*KeywordMap)["mat3x2"] = MAT3X2;
- (*KeywordMap)["mat3x3"] = MAT3X3;
- (*KeywordMap)["mat3x4"] = MAT3X4;
- (*KeywordMap)["mat4x2"] = MAT4X2;
- (*KeywordMap)["mat4x3"] = MAT4X3;
- (*KeywordMap)["mat4x4"] = MAT4X4;
(*KeywordMap)["dmat2"] = DMAT2;
(*KeywordMap)["dmat3"] = DMAT3;
(*KeywordMap)["dmat4"] = DMAT4;
@@ -457,11 +474,6 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["dvec2"] = DVEC2;
(*KeywordMap)["dvec3"] = DVEC3;
(*KeywordMap)["dvec4"] = DVEC4;
- (*KeywordMap)["uint"] = UINT;
- (*KeywordMap)["uvec2"] = UVEC2;
- (*KeywordMap)["uvec3"] = UVEC3;
- (*KeywordMap)["uvec4"] = UVEC4;
-
(*KeywordMap)["int64_t"] = INT64_T;
(*KeywordMap)["uint64_t"] = UINT64_T;
(*KeywordMap)["i64vec2"] = I64VEC2;
@@ -548,19 +560,10 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["f64mat4x2"] = F64MAT4X2;
(*KeywordMap)["f64mat4x3"] = F64MAT4X3;
(*KeywordMap)["f64mat4x4"] = F64MAT4X4;
+#endif
(*KeywordMap)["sampler2D"] = SAMPLER2D;
(*KeywordMap)["samplerCube"] = SAMPLERCUBE;
- (*KeywordMap)["samplerCubeArray"] = SAMPLERCUBEARRAY;
- (*KeywordMap)["samplerCubeArrayShadow"] = SAMPLERCUBEARRAYSHADOW;
- (*KeywordMap)["isamplerCubeArray"] = ISAMPLERCUBEARRAY;
- (*KeywordMap)["usamplerCubeArray"] = USAMPLERCUBEARRAY;
- (*KeywordMap)["sampler1DArrayShadow"] = SAMPLER1DARRAYSHADOW;
- (*KeywordMap)["isampler1DArray"] = ISAMPLER1DARRAY;
- (*KeywordMap)["usampler1D"] = USAMPLER1D;
- (*KeywordMap)["isampler1D"] = ISAMPLER1D;
- (*KeywordMap)["usampler1DArray"] = USAMPLER1DARRAY;
- (*KeywordMap)["samplerBuffer"] = SAMPLERBUFFER;
(*KeywordMap)["samplerCubeShadow"] = SAMPLERCUBESHADOW;
(*KeywordMap)["sampler2DArray"] = SAMPLER2DARRAY;
(*KeywordMap)["sampler2DArrayShadow"] = SAMPLER2DARRAYSHADOW;
@@ -572,6 +575,39 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["usampler3D"] = USAMPLER3D;
(*KeywordMap)["usamplerCube"] = USAMPLERCUBE;
(*KeywordMap)["usampler2DArray"] = USAMPLER2DARRAY;
+ (*KeywordMap)["sampler3D"] = SAMPLER3D;
+ (*KeywordMap)["sampler2DShadow"] = SAMPLER2DSHADOW;
+
+ (*KeywordMap)["texture2D"] = TEXTURE2D;
+ (*KeywordMap)["textureCube"] = TEXTURECUBE;
+ (*KeywordMap)["texture2DArray"] = TEXTURE2DARRAY;
+ (*KeywordMap)["itexture2D"] = ITEXTURE2D;
+ (*KeywordMap)["itexture3D"] = ITEXTURE3D;
+ (*KeywordMap)["itextureCube"] = ITEXTURECUBE;
+ (*KeywordMap)["itexture2DArray"] = ITEXTURE2DARRAY;
+ (*KeywordMap)["utexture2D"] = UTEXTURE2D;
+ (*KeywordMap)["utexture3D"] = UTEXTURE3D;
+ (*KeywordMap)["utextureCube"] = UTEXTURECUBE;
+ (*KeywordMap)["utexture2DArray"] = UTEXTURE2DARRAY;
+ (*KeywordMap)["texture3D"] = TEXTURE3D;
+
+ (*KeywordMap)["sampler"] = SAMPLER;
+ (*KeywordMap)["samplerShadow"] = SAMPLERSHADOW;
+
+#ifndef GLSLANG_WEB
+ (*KeywordMap)["textureCubeArray"] = TEXTURECUBEARRAY;
+ (*KeywordMap)["itextureCubeArray"] = ITEXTURECUBEARRAY;
+ (*KeywordMap)["utextureCubeArray"] = UTEXTURECUBEARRAY;
+ (*KeywordMap)["samplerCubeArray"] = SAMPLERCUBEARRAY;
+ (*KeywordMap)["samplerCubeArrayShadow"] = SAMPLERCUBEARRAYSHADOW;
+ (*KeywordMap)["isamplerCubeArray"] = ISAMPLERCUBEARRAY;
+ (*KeywordMap)["usamplerCubeArray"] = USAMPLERCUBEARRAY;
+ (*KeywordMap)["sampler1DArrayShadow"] = SAMPLER1DARRAYSHADOW;
+ (*KeywordMap)["isampler1DArray"] = ISAMPLER1DARRAY;
+ (*KeywordMap)["usampler1D"] = USAMPLER1D;
+ (*KeywordMap)["isampler1D"] = ISAMPLER1D;
+ (*KeywordMap)["usampler1DArray"] = USAMPLER1DARRAY;
+ (*KeywordMap)["samplerBuffer"] = SAMPLERBUFFER;
(*KeywordMap)["isampler2DRect"] = ISAMPLER2DRECT;
(*KeywordMap)["usampler2DRect"] = USAMPLER2DRECT;
(*KeywordMap)["isamplerBuffer"] = ISAMPLERBUFFER;
@@ -584,8 +620,6 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["usampler2DMSArray"] = USAMPLER2DMSARRAY;
(*KeywordMap)["sampler1D"] = SAMPLER1D;
(*KeywordMap)["sampler1DShadow"] = SAMPLER1DSHADOW;
- (*KeywordMap)["sampler3D"] = SAMPLER3D;
- (*KeywordMap)["sampler2DShadow"] = SAMPLER2DSHADOW;
(*KeywordMap)["sampler2DRect"] = SAMPLER2DRECT;
(*KeywordMap)["sampler2DRectShadow"] = SAMPLER2DRECTSHADOW;
(*KeywordMap)["sampler1DArray"] = SAMPLER1DARRAY;
@@ -594,28 +628,11 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["__samplerExternal2DY2YEXT"] = SAMPLEREXTERNAL2DY2YEXT; // GL_EXT_YUV_target
- (*KeywordMap)["sampler"] = SAMPLER;
- (*KeywordMap)["samplerShadow"] = SAMPLERSHADOW;
-
- (*KeywordMap)["texture2D"] = TEXTURE2D;
- (*KeywordMap)["textureCube"] = TEXTURECUBE;
- (*KeywordMap)["textureCubeArray"] = TEXTURECUBEARRAY;
- (*KeywordMap)["itextureCubeArray"] = ITEXTURECUBEARRAY;
- (*KeywordMap)["utextureCubeArray"] = UTEXTURECUBEARRAY;
(*KeywordMap)["itexture1DArray"] = ITEXTURE1DARRAY;
(*KeywordMap)["utexture1D"] = UTEXTURE1D;
(*KeywordMap)["itexture1D"] = ITEXTURE1D;
(*KeywordMap)["utexture1DArray"] = UTEXTURE1DARRAY;
(*KeywordMap)["textureBuffer"] = TEXTUREBUFFER;
- (*KeywordMap)["texture2DArray"] = TEXTURE2DARRAY;
- (*KeywordMap)["itexture2D"] = ITEXTURE2D;
- (*KeywordMap)["itexture3D"] = ITEXTURE3D;
- (*KeywordMap)["itextureCube"] = ITEXTURECUBE;
- (*KeywordMap)["itexture2DArray"] = ITEXTURE2DARRAY;
- (*KeywordMap)["utexture2D"] = UTEXTURE2D;
- (*KeywordMap)["utexture3D"] = UTEXTURE3D;
- (*KeywordMap)["utextureCube"] = UTEXTURECUBE;
- (*KeywordMap)["utexture2DArray"] = UTEXTURE2DARRAY;
(*KeywordMap)["itexture2DRect"] = ITEXTURE2DRECT;
(*KeywordMap)["utexture2DRect"] = UTEXTURE2DRECT;
(*KeywordMap)["itextureBuffer"] = ITEXTUREBUFFER;
@@ -627,7 +644,6 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["itexture2DMSArray"] = ITEXTURE2DMSARRAY;
(*KeywordMap)["utexture2DMSArray"] = UTEXTURE2DMSARRAY;
(*KeywordMap)["texture1D"] = TEXTURE1D;
- (*KeywordMap)["texture3D"] = TEXTURE3D;
(*KeywordMap)["texture2DRect"] = TEXTURE2DRECT;
(*KeywordMap)["texture1DArray"] = TEXTURE1DARRAY;
@@ -638,7 +654,6 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["usubpassInput"] = USUBPASSINPUT;
(*KeywordMap)["usubpassInputMS"] = USUBPASSINPUTMS;
-#ifdef AMD_EXTENSIONS
(*KeywordMap)["f16sampler1D"] = F16SAMPLER1D;
(*KeywordMap)["f16sampler2D"] = F16SAMPLER2D;
(*KeywordMap)["f16sampler3D"] = F16SAMPLER3D;
@@ -684,25 +699,10 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["f16subpassInput"] = F16SUBPASSINPUT;
(*KeywordMap)["f16subpassInputMS"] = F16SUBPASSINPUTMS;
-#endif
-
- (*KeywordMap)["noperspective"] = NOPERSPECTIVE;
- (*KeywordMap)["smooth"] = SMOOTH;
- (*KeywordMap)["flat"] = FLAT;
-#ifdef AMD_EXTENSIONS
(*KeywordMap)["__explicitInterpAMD"] = EXPLICITINTERPAMD;
-#endif
- (*KeywordMap)["centroid"] = CENTROID;
-#ifdef NV_EXTENSIONS
(*KeywordMap)["pervertexNV"] = PERVERTEXNV;
-#endif
(*KeywordMap)["precise"] = PRECISE;
- (*KeywordMap)["invariant"] = INVARIANT;
- (*KeywordMap)["packed"] = PACKED;
- (*KeywordMap)["resource"] = RESOURCE;
- (*KeywordMap)["superp"] = SUPERP;
-#ifdef NV_EXTENSIONS
(*KeywordMap)["rayPayloadNV"] = PAYLOADNV;
(*KeywordMap)["rayPayloadInNV"] = PAYLOADINNV;
(*KeywordMap)["hitAttributeNV"] = HITATTRNV;
@@ -712,9 +712,10 @@ void TScanContext::fillInKeywordMap()
(*KeywordMap)["perprimitiveNV"] = PERPRIMITIVENV;
(*KeywordMap)["perviewNV"] = PERVIEWNV;
(*KeywordMap)["taskNV"] = PERTASKNV;
-#endif
(*KeywordMap)["fcoopmatNV"] = FCOOPMATNV;
+ (*KeywordMap)["icoopmatNV"] = ICOOPMATNV;
+ (*KeywordMap)["ucoopmatNV"] = UCOOPMATNV;
ReservedSet = new std::unordered_set<const char*, str_hash, str_eq>;
@@ -755,14 +756,17 @@ void TScanContext::fillInKeywordMap()
ReservedSet->insert("cast");
ReservedSet->insert("namespace");
ReservedSet->insert("using");
+#endif
}
void TScanContext::deleteKeywordMap()
{
delete KeywordMap;
KeywordMap = nullptr;
+#ifndef GLSLANG_WEB
delete ReservedSet;
ReservedSet = nullptr;
+#endif
}
// Called by yylex to get the next token.
@@ -841,13 +845,15 @@ int TScanContext::tokenize(TPpContext* pp, TParserToken& token)
case PpAtomConstInt: parserToken->sType.lex.i = ppToken.ival; return INTCONSTANT;
case PpAtomConstUint: parserToken->sType.lex.i = ppToken.ival; return UINTCONSTANT;
+ case PpAtomConstFloat: parserToken->sType.lex.d = ppToken.dval; return FLOATCONSTANT;
+#ifndef GLSLANG_WEB
case PpAtomConstInt16: parserToken->sType.lex.i = ppToken.ival; return INT16CONSTANT;
case PpAtomConstUint16: parserToken->sType.lex.i = ppToken.ival; return UINT16CONSTANT;
case PpAtomConstInt64: parserToken->sType.lex.i64 = ppToken.i64val; return INT64CONSTANT;
case PpAtomConstUint64: parserToken->sType.lex.i64 = ppToken.i64val; return UINT64CONSTANT;
- case PpAtomConstFloat: parserToken->sType.lex.d = ppToken.dval; return FLOATCONSTANT;
case PpAtomConstDouble: parserToken->sType.lex.d = ppToken.dval; return DOUBLECONSTANT;
case PpAtomConstFloat16: parserToken->sType.lex.d = ppToken.dval; return FLOAT16CONSTANT;
+#endif
case PpAtomIdentifier:
{
int token = tokenizeIdentifier();
@@ -869,8 +875,10 @@ int TScanContext::tokenize(TPpContext* pp, TParserToken& token)
int TScanContext::tokenizeIdentifier()
{
+#ifndef GLSLANG_WEB
if (ReservedSet->find(tokenText) != ReservedSet->end())
return reservedWord();
+#endif
auto it = KeywordMap->find(tokenText);
if (it == KeywordMap->end()) {
@@ -897,20 +905,21 @@ int TScanContext::tokenizeIdentifier()
case CASE:
return keyword;
+ case BUFFER:
+ afterBuffer = true;
+ if ((parseContext.isEsProfile() && parseContext.version < 310) ||
+ (!parseContext.isEsProfile() && parseContext.version < 430))
+ return identifierOrType();
+ return keyword;
+
case STRUCT:
afterStruct = true;
return keyword;
- case NONUNIFORM:
- if (parseContext.extensionTurnedOn(E_GL_EXT_nonuniform_qualifier))
- return keyword;
- else
- return identifierOrType();
-
case SWITCH:
case DEFAULT:
- if ((parseContext.profile == EEsProfile && parseContext.version < 300) ||
- (parseContext.profile != EEsProfile && parseContext.version < 130))
+ if ((parseContext.isEsProfile() && parseContext.version < 300) ||
+ (!parseContext.isEsProfile() && parseContext.version < 130))
reservedWord();
return keyword;
@@ -942,20 +951,59 @@ int TScanContext::tokenizeIdentifier()
parserToken->sType.lex.b = false;
return keyword;
- case ATTRIBUTE:
- case VARYING:
- if (parseContext.profile == EEsProfile && parseContext.version >= 300)
+ case SMOOTH:
+ if ((parseContext.isEsProfile() && parseContext.version < 300) ||
+ (!parseContext.isEsProfile() && parseContext.version < 130))
+ return identifierOrType();
+ return keyword;
+ case FLAT:
+ if (parseContext.isEsProfile() && parseContext.version < 300)
reservedWord();
+ else if (!parseContext.isEsProfile() && parseContext.version < 130)
+ return identifierOrType();
return keyword;
-
- case BUFFER:
- afterBuffer = true;
- if ((parseContext.profile == EEsProfile && parseContext.version < 310) ||
- (parseContext.profile != EEsProfile && parseContext.version < 430))
+ case CENTROID:
+ if (parseContext.version < 120)
+ return identifierOrType();
+ return keyword;
+ case INVARIANT:
+ if (!parseContext.isEsProfile() && parseContext.version < 120)
return identifierOrType();
return keyword;
+ case PACKED:
+ if ((parseContext.isEsProfile() && parseContext.version < 300) ||
+ (!parseContext.isEsProfile() && parseContext.version < 330))
+ return reservedWord();
+ return identifierOrType();
+
+ case RESOURCE:
+ {
+ bool reserved = (parseContext.isEsProfile() && parseContext.version >= 300) ||
+ (!parseContext.isEsProfile() && parseContext.version >= 420);
+ return identifierOrReserved(reserved);
+ }
+ case SUPERP:
+ {
+ bool reserved = parseContext.isEsProfile() || parseContext.version >= 130;
+ return identifierOrReserved(reserved);
+ }
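
[editor note] Each keyword case above resolves to one of three outcomes -- real keyword, plain identifier, or reserved-word error -- based on profile and version. A condensed sketch of the FLAT rule as an example (helper names are stand-ins for keyword / identifierOrType / reservedWord):

    enum Outcome { Keyword, Identifier, ReservedError };

    // Mirrors the FLAT case: reserved in ES < 300, a plain identifier in
    // desktop GLSL < 130, and a keyword everywhere else.
    Outcome classifyFlat(bool esProfile, int version) {
        if (esProfile && version < 300)  return ReservedError;
        if (!esProfile && version < 130) return Identifier;
        return Keyword;
    }
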
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
+ case NOPERSPECTIVE:
+ if (parseContext.extensionTurnedOn(E_GL_NV_shader_noperspective_interpolation))
+ return keyword;
+ return es30ReservedFromGLSL(130);
+
+ case NONUNIFORM:
+ if (parseContext.extensionTurnedOn(E_GL_EXT_nonuniform_qualifier))
+ return keyword;
+ else
+ return identifierOrType();
+ case ATTRIBUTE:
+ case VARYING:
+ if (parseContext.isEsProfile() && parseContext.version >= 300)
+ reservedWord();
+ return keyword;
case PAYLOADNV:
case PAYLOADINNV:
case HITATTRNV:
@@ -963,14 +1011,11 @@ int TScanContext::tokenizeIdentifier()
case CALLDATAINNV:
case ACCSTRUCTNV:
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.profile != EEsProfile && parseContext.version >= 460
- && parseContext.extensionTurnedOn(E_GL_NV_ray_tracing)))
+ parseContext.extensionTurnedOn(E_GL_NV_ray_tracing))
return keyword;
return identifierOrType();
-#endif
-
case ATOMIC_UINT:
- if ((parseContext.profile == EEsProfile && parseContext.version >= 310) ||
+ if ((parseContext.isEsProfile() && parseContext.version >= 310) ||
parseContext.extensionTurnedOn(E_GL_ARB_shader_atomic_counters))
return keyword;
return es30ReservedFromGLSL(420);
@@ -984,53 +1029,51 @@ int TScanContext::tokenizeIdentifier()
case RESTRICT:
case READONLY:
case WRITEONLY:
- if (parseContext.profile == EEsProfile && parseContext.version >= 310)
+ if (parseContext.isEsProfile() && parseContext.version >= 310)
return keyword;
return es30ReservedFromGLSL(parseContext.extensionTurnedOn(E_GL_ARB_shader_image_load_store) ? 130 : 420);
-
case VOLATILE:
- if (parseContext.profile == EEsProfile && parseContext.version >= 310)
+ if (parseContext.isEsProfile() && parseContext.version >= 310)
return keyword;
- if (! parseContext.symbolTable.atBuiltInLevel() && (parseContext.profile == EEsProfile ||
+ if (! parseContext.symbolTable.atBuiltInLevel() && (parseContext.isEsProfile() ||
(parseContext.version < 420 && ! parseContext.extensionTurnedOn(E_GL_ARB_shader_image_load_store))))
reservedWord();
return keyword;
-
- case LAYOUT:
- {
- const int numLayoutExts = 2;
- const char* layoutExts[numLayoutExts] = { E_GL_ARB_shading_language_420pack,
- E_GL_ARB_explicit_attrib_location };
- if ((parseContext.profile == EEsProfile && parseContext.version < 300) ||
- (parseContext.profile != EEsProfile && parseContext.version < 140 &&
- ! parseContext.extensionsTurnedOn(numLayoutExts, layoutExts)))
- return identifierOrType();
- return keyword;
- }
- case SHARED:
- if ((parseContext.profile == EEsProfile && parseContext.version < 300) ||
- (parseContext.profile != EEsProfile && parseContext.version < 140))
- return identifierOrType();
- return keyword;
-
case PATCH:
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.profile == EEsProfile &&
+ (parseContext.isEsProfile() &&
(parseContext.version >= 320 ||
parseContext.extensionsTurnedOn(Num_AEP_tessellation_shader, AEP_tessellation_shader))) ||
- (parseContext.profile != EEsProfile && parseContext.extensionTurnedOn(E_GL_ARB_tessellation_shader)))
+ (!parseContext.isEsProfile() && parseContext.extensionTurnedOn(E_GL_ARB_tessellation_shader)))
return keyword;
return es30ReservedFromGLSL(400);
case SAMPLE:
- if ((parseContext.profile == EEsProfile && parseContext.version >= 320) ||
+ if ((parseContext.isEsProfile() && parseContext.version >= 320) ||
parseContext.extensionsTurnedOn(1, &E_GL_OES_shader_multisample_interpolation))
return keyword;
return es30ReservedFromGLSL(400);
case SUBROUTINE:
return es30ReservedFromGLSL(400);
+#endif
+ case SHARED:
+ if ((parseContext.isEsProfile() && parseContext.version < 300) ||
+ (!parseContext.isEsProfile() && parseContext.version < 140))
+ return identifierOrType();
+ return keyword;
+ case LAYOUT:
+ {
+ const int numLayoutExts = 2;
+ const char* layoutExts[numLayoutExts] = { E_GL_ARB_shading_language_420pack,
+ E_GL_ARB_explicit_attrib_location };
+ if ((parseContext.isEsProfile() && parseContext.version < 300) ||
+ (!parseContext.isEsProfile() && parseContext.version < 140 &&
+ ! parseContext.extensionsTurnedOn(numLayoutExts, layoutExts)))
+ return identifierOrType();
+ return keyword;
+ }
case HIGH_PRECISION:
case MEDIUM_PRECISION:
@@ -1049,6 +1092,7 @@ int TScanContext::tokenizeIdentifier()
case MAT4X4:
return matNxM();
+#ifndef GLSLANG_WEB
case DMAT2:
case DMAT3:
case DMAT4:
@@ -1079,7 +1123,7 @@ int TScanContext::tokenizeIdentifier()
case IIMAGEBUFFER:
case UIMAGEBUFFER:
afterType = true;
- if ((parseContext.profile == EEsProfile && parseContext.version >= 320) ||
+ if ((parseContext.isEsProfile() && parseContext.version >= 320) ||
parseContext.extensionsTurnedOn(Num_AEP_texture_buffer, AEP_texture_buffer))
return keyword;
return firstGenerationImage(false);
@@ -1103,7 +1147,7 @@ int TScanContext::tokenizeIdentifier()
case IIMAGECUBEARRAY:
case UIMAGECUBEARRAY:
afterType = true;
- if ((parseContext.profile == EEsProfile && parseContext.version >= 320) ||
+ if ((parseContext.isEsProfile() && parseContext.version >= 320) ||
parseContext.extensionsTurnedOn(Num_AEP_texture_cube_map_array, AEP_texture_cube_map_array))
return keyword;
return secondGenerationImage();
@@ -1122,7 +1166,10 @@ int TScanContext::tokenizeIdentifier()
case DVEC3:
case DVEC4:
afterType = true;
- if (parseContext.profile == EEsProfile || parseContext.version < 400)
+ if (parseContext.isEsProfile() || parseContext.version < 150 ||
+ (!parseContext.symbolTable.atBuiltInLevel() &&
+ parseContext.version < 400 &&
+ !parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_fp64)))
reservedWord();
return keyword;
@@ -1136,10 +1183,9 @@ int TScanContext::tokenizeIdentifier()
case U64VEC4:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.profile != EEsProfile && parseContext.version >= 450 &&
- (parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_int64) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int64))))
+ parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_int64) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int64))
return keyword;
return identifierOrType();
@@ -1153,10 +1199,9 @@ int TScanContext::tokenizeIdentifier()
case U8VEC4:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- ((parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_8bit_storage) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int8)) &&
- parseContext.profile != EEsProfile && parseContext.version >= 450))
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_8bit_storage) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int8))
return keyword;
return identifierOrType();
@@ -1170,14 +1215,10 @@ int TScanContext::tokenizeIdentifier()
case U16VEC4:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.profile != EEsProfile && parseContext.version >= 450 &&
- (
-#ifdef AMD_EXTENSIONS
- parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_int16) ||
-#endif
- parseContext.extensionTurnedOn(E_GL_EXT_shader_16bit_storage) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int16))))
+ parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_int16) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_16bit_storage) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int16))
return keyword;
return identifierOrType();
case INT32_T:
@@ -1190,9 +1231,8 @@ int TScanContext::tokenizeIdentifier()
case U32VEC4:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- ((parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int32)) &&
- parseContext.profile != EEsProfile && parseContext.version >= 450))
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_int32))
return keyword;
return identifierOrType();
case FLOAT32_T:
@@ -1213,9 +1253,8 @@ int TScanContext::tokenizeIdentifier()
case F32MAT4X4:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- ((parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float32)) &&
- parseContext.profile != EEsProfile && parseContext.version >= 450))
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float32))
return keyword;
return identifierOrType();
@@ -1237,9 +1276,8 @@ int TScanContext::tokenizeIdentifier()
case F64MAT4X4:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- ((parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float64)) &&
- parseContext.profile != EEsProfile && parseContext.version >= 450))
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float64))
return keyword;
return identifierOrType();
@@ -1249,14 +1287,10 @@ int TScanContext::tokenizeIdentifier()
case F16VEC4:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.profile != EEsProfile && parseContext.version >= 450 &&
- (
-#ifdef AMD_EXTENSIONS
- parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float) ||
-#endif
- parseContext.extensionTurnedOn(E_GL_EXT_shader_16bit_storage) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float16))))
+ parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_16bit_storage) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float16))
return keyword;
return identifierOrType();
@@ -1275,13 +1309,9 @@ int TScanContext::tokenizeIdentifier()
case F16MAT4X4:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.profile != EEsProfile && parseContext.version >= 450 &&
- (
-#ifdef AMD_EXTENSIONS
- parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float) ||
-#endif
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
- parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float16))))
+ parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ parseContext.extensionTurnedOn(E_GL_EXT_shader_explicit_arithmetic_types_float16))
return keyword;
return identifierOrType();
@@ -1291,20 +1321,21 @@ int TScanContext::tokenizeIdentifier()
case ISAMPLERCUBEARRAY:
case USAMPLERCUBEARRAY:
afterType = true;
- if ((parseContext.profile == EEsProfile && parseContext.version >= 320) ||
+ if ((parseContext.isEsProfile() && parseContext.version >= 320) ||
parseContext.extensionsTurnedOn(Num_AEP_texture_cube_map_array, AEP_texture_cube_map_array))
return keyword;
- if (parseContext.profile == EEsProfile || (parseContext.version < 400 && ! parseContext.extensionTurnedOn(E_GL_ARB_texture_cube_map_array)))
+ if (parseContext.isEsProfile() || (parseContext.version < 400 && ! parseContext.extensionTurnedOn(E_GL_ARB_texture_cube_map_array)))
reservedWord();
return keyword;
- case ISAMPLER1D:
- case ISAMPLER1DARRAY:
- case SAMPLER1DARRAYSHADOW:
- case USAMPLER1D:
- case USAMPLER1DARRAY:
- afterType = true;
- return es30ReservedFromGLSL(130);
+ case TEXTURECUBEARRAY:
+ case ITEXTURECUBEARRAY:
+ case UTEXTURECUBEARRAY:
+ if (parseContext.spvVersion.vulkan > 0)
+ return keyword;
+ else
+ return identifierOrType();
+#endif
case UINT:
case UVEC2:
@@ -1324,6 +1355,49 @@ int TScanContext::tokenizeIdentifier()
afterType = true;
return nonreservedKeyword(300, 130);
+ case SAMPLER3D:
+ afterType = true;
+ if (parseContext.isEsProfile() && parseContext.version < 300) {
+ if (!parseContext.extensionTurnedOn(E_GL_OES_texture_3D))
+ reservedWord();
+ }
+ return keyword;
+
+ case SAMPLER2DSHADOW:
+ afterType = true;
+ if (parseContext.isEsProfile() && parseContext.version < 300) {
+ if (!parseContext.extensionTurnedOn(E_GL_EXT_shadow_samplers))
+ reservedWord();
+ }
+ return keyword;
+
+ case TEXTURE2D:
+ case TEXTURECUBE:
+ case TEXTURE2DARRAY:
+ case ITEXTURE2D:
+ case ITEXTURE3D:
+ case ITEXTURECUBE:
+ case ITEXTURE2DARRAY:
+ case UTEXTURE2D:
+ case UTEXTURE3D:
+ case UTEXTURECUBE:
+ case UTEXTURE2DARRAY:
+ case TEXTURE3D:
+ case SAMPLER:
+ case SAMPLERSHADOW:
+ if (parseContext.spvVersion.vulkan > 0)
+ return keyword;
+ else
+ return identifierOrType();
+
+#ifndef GLSLANG_WEB
+ case ISAMPLER1D:
+ case ISAMPLER1DARRAY:
+ case SAMPLER1DARRAYSHADOW:
+ case USAMPLER1D:
+ case USAMPLER1DARRAY:
+ afterType = true;
+ return es30ReservedFromGLSL(130);
case ISAMPLER2DRECT:
case USAMPLER2DRECT:
afterType = true;
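
The block of separated texture/sampler cases added above (TEXTURE2D, SAMPLER,
SAMPLERSHADOW, ...) makes those types Vulkan-only: when spvVersion.vulkan > 0
they parse as type keywords, otherwise they stay ordinary identifiers. So
"texture2D myTex;" declares a separated texture when compiling for Vulkan
SPIR-V, while in plain GL compilation "texture2D" remains usable as an
ordinary variable or function name.
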
@@ -1331,7 +1405,7 @@ int TScanContext::tokenizeIdentifier()
case SAMPLERBUFFER:
afterType = true;
- if ((parseContext.profile == EEsProfile && parseContext.version >= 320) ||
+ if ((parseContext.isEsProfile() && parseContext.version >= 320) ||
parseContext.extensionsTurnedOn(Num_AEP_texture_buffer, AEP_texture_buffer))
return keyword;
return es30ReservedFromGLSL(130);
@@ -1339,7 +1413,7 @@ int TScanContext::tokenizeIdentifier()
case ISAMPLERBUFFER:
case USAMPLERBUFFER:
afterType = true;
- if ((parseContext.profile == EEsProfile && parseContext.version >= 320) ||
+ if ((parseContext.isEsProfile() && parseContext.version >= 320) ||
parseContext.extensionsTurnedOn(Num_AEP_texture_buffer, AEP_texture_buffer))
return keyword;
return es30ReservedFromGLSL(140);
@@ -1348,7 +1422,10 @@ int TScanContext::tokenizeIdentifier()
case ISAMPLER2DMS:
case USAMPLER2DMS:
afterType = true;
- if (parseContext.profile == EEsProfile && parseContext.version >= 310)
+ if (parseContext.isEsProfile() && parseContext.version >= 310)
+ return keyword;
+ if (!parseContext.isEsProfile() && (parseContext.version > 140 ||
+ (parseContext.version == 140 && parseContext.extensionsTurnedOn(1, &E_GL_ARB_texture_multisample))))
return keyword;
return es30ReservedFromGLSL(150);
@@ -1356,38 +1433,25 @@ int TScanContext::tokenizeIdentifier()
case ISAMPLER2DMSARRAY:
case USAMPLER2DMSARRAY:
afterType = true;
- if ((parseContext.profile == EEsProfile && parseContext.version >= 320) ||
+ if ((parseContext.isEsProfile() && parseContext.version >= 320) ||
parseContext.extensionsTurnedOn(1, &E_GL_OES_texture_storage_multisample_2d_array))
return keyword;
+ if (!parseContext.isEsProfile() && (parseContext.version > 140 ||
+ (parseContext.version == 140 && parseContext.extensionsTurnedOn(1, &E_GL_ARB_texture_multisample))))
+ return keyword;
return es30ReservedFromGLSL(150);
case SAMPLER1D:
case SAMPLER1DSHADOW:
afterType = true;
- if (parseContext.profile == EEsProfile)
+ if (parseContext.isEsProfile())
reservedWord();
return keyword;
- case SAMPLER3D:
- afterType = true;
- if (parseContext.profile == EEsProfile && parseContext.version < 300) {
- if (!parseContext.extensionTurnedOn(E_GL_OES_texture_3D))
- reservedWord();
- }
- return keyword;
-
- case SAMPLER2DSHADOW:
- afterType = true;
- if (parseContext.profile == EEsProfile && parseContext.version < 300) {
- if (!parseContext.extensionTurnedOn(E_GL_EXT_shadow_samplers))
- reservedWord();
- }
- return keyword;
-
case SAMPLER2DRECT:
case SAMPLER2DRECTSHADOW:
afterType = true;
- if (parseContext.profile == EEsProfile)
+ if (parseContext.isEsProfile())
reservedWord();
else if (parseContext.version < 140 && ! parseContext.symbolTable.atBuiltInLevel() && ! parseContext.extensionTurnedOn(E_GL_ARB_texture_rectangle)) {
if (parseContext.relaxedErrors())
@@ -1399,10 +1463,10 @@ int TScanContext::tokenizeIdentifier()
case SAMPLER1DARRAY:
afterType = true;
- if (parseContext.profile == EEsProfile && parseContext.version == 300)
+ if (parseContext.isEsProfile() && parseContext.version == 300)
reservedWord();
- else if ((parseContext.profile == EEsProfile && parseContext.version < 300) ||
- (parseContext.profile != EEsProfile && parseContext.version < 130))
+ else if ((parseContext.isEsProfile() && parseContext.version < 300) ||
+ (!parseContext.isEsProfile() && parseContext.version < 130))
return identifierOrType();
return keyword;
@@ -1421,25 +1485,11 @@ int TScanContext::tokenizeIdentifier()
return keyword;
return identifierOrType();
- case TEXTURE2D:
- case TEXTURECUBE:
- case TEXTURECUBEARRAY:
- case ITEXTURECUBEARRAY:
- case UTEXTURECUBEARRAY:
case ITEXTURE1DARRAY:
case UTEXTURE1D:
case ITEXTURE1D:
case UTEXTURE1DARRAY:
case TEXTUREBUFFER:
- case TEXTURE2DARRAY:
- case ITEXTURE2D:
- case ITEXTURE3D:
- case ITEXTURECUBE:
- case ITEXTURE2DARRAY:
- case UTEXTURE2D:
- case UTEXTURE3D:
- case UTEXTURECUBE:
- case UTEXTURE2DARRAY:
case ITEXTURE2DRECT:
case UTEXTURE2DRECT:
case ITEXTUREBUFFER:
@@ -1451,11 +1501,8 @@ int TScanContext::tokenizeIdentifier()
case ITEXTURE2DMSARRAY:
case UTEXTURE2DMSARRAY:
case TEXTURE1D:
- case TEXTURE3D:
case TEXTURE2DRECT:
case TEXTURE1DARRAY:
- case SAMPLER:
- case SAMPLERSHADOW:
if (parseContext.spvVersion.vulkan > 0)
return keyword;
else
@@ -1472,7 +1519,6 @@ int TScanContext::tokenizeIdentifier()
else
return identifierOrType();
-#ifdef AMD_EXTENSIONS
case F16SAMPLER1D:
case F16SAMPLER2D:
case F16SAMPLER3D:
@@ -1520,99 +1566,40 @@ int TScanContext::tokenizeIdentifier()
case F16SUBPASSINPUTMS:
afterType = true;
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float_fetch) &&
- parseContext.profile != EEsProfile && parseContext.version >= 450))
+ parseContext.extensionTurnedOn(E_GL_AMD_gpu_shader_half_float_fetch))
return keyword;
return identifierOrType();
-#endif
-
- case NOPERSPECTIVE:
-#ifdef NV_EXTENSIONS
- if (parseContext.profile == EEsProfile && parseContext.version >= 300 &&
- parseContext.extensionTurnedOn(E_GL_NV_shader_noperspective_interpolation))
- return keyword;
-#endif
- return es30ReservedFromGLSL(130);
- case SMOOTH:
- if ((parseContext.profile == EEsProfile && parseContext.version < 300) ||
- (parseContext.profile != EEsProfile && parseContext.version < 130))
- return identifierOrType();
- return keyword;
-
-#ifdef AMD_EXTENSIONS
case EXPLICITINTERPAMD:
- if (parseContext.profile != EEsProfile && parseContext.version >= 450 &&
- parseContext.extensionTurnedOn(E_GL_AMD_shader_explicit_vertex_parameter))
+ if (parseContext.extensionTurnedOn(E_GL_AMD_shader_explicit_vertex_parameter))
return keyword;
return identifierOrType();
-#endif
-#ifdef NV_EXTENSIONS
case PERVERTEXNV:
- if (((parseContext.profile != EEsProfile && parseContext.version >= 450) ||
- (parseContext.profile == EEsProfile && parseContext.version >= 320)) &&
+ if ((!parseContext.isEsProfile() && parseContext.version >= 450) ||
parseContext.extensionTurnedOn(E_GL_NV_fragment_shader_barycentric))
return keyword;
return identifierOrType();
-#endif
-
- case FLAT:
- if (parseContext.profile == EEsProfile && parseContext.version < 300)
- reservedWord();
- else if (parseContext.profile != EEsProfile && parseContext.version < 130)
- return identifierOrType();
- return keyword;
-
- case CENTROID:
- if (parseContext.version < 120)
- return identifierOrType();
- return keyword;
case PRECISE:
- if ((parseContext.profile == EEsProfile &&
+ if ((parseContext.isEsProfile() &&
(parseContext.version >= 320 || parseContext.extensionsTurnedOn(Num_AEP_gpu_shader5, AEP_gpu_shader5))) ||
- (parseContext.profile != EEsProfile && parseContext.version >= 400))
+ (!parseContext.isEsProfile() && parseContext.version >= 400))
return keyword;
- if (parseContext.profile == EEsProfile && parseContext.version == 310) {
+ if (parseContext.isEsProfile() && parseContext.version == 310) {
reservedWord();
return keyword;
}
return identifierOrType();
- case INVARIANT:
- if (parseContext.profile != EEsProfile && parseContext.version < 120)
- return identifierOrType();
- return keyword;
-
- case PACKED:
- if ((parseContext.profile == EEsProfile && parseContext.version < 300) ||
- (parseContext.profile != EEsProfile && parseContext.version < 330))
- return reservedWord();
- return identifierOrType();
-
- case RESOURCE:
- {
- bool reserved = (parseContext.profile == EEsProfile && parseContext.version >= 300) ||
- (parseContext.profile != EEsProfile && parseContext.version >= 420);
- return identifierOrReserved(reserved);
- }
- case SUPERP:
- {
- bool reserved = parseContext.profile == EEsProfile || parseContext.version >= 130;
- return identifierOrReserved(reserved);
- }
-
-#ifdef NV_EXTENSIONS
case PERPRIMITIVENV:
case PERVIEWNV:
case PERTASKNV:
- if ((parseContext.profile != EEsProfile && parseContext.version >= 450) ||
- (parseContext.profile == EEsProfile && parseContext.version >= 320) ||
+ if ((!parseContext.isEsProfile() && parseContext.version >= 450) ||
+ (parseContext.isEsProfile() && parseContext.version >= 320) ||
parseContext.extensionTurnedOn(E_GL_NV_mesh_shader))
return keyword;
return identifierOrType();
-#endif
case FCOOPMATNV:
afterType = true;
@@ -1621,6 +1608,21 @@ int TScanContext::tokenizeIdentifier()
return keyword;
return identifierOrType();
+ case UCOOPMATNV:
+ case ICOOPMATNV:
+ afterType = true;
+ if (parseContext.symbolTable.atBuiltInLevel() ||
+ parseContext.extensionTurnedOn(E_GL_NV_integer_cooperative_matrix))
+ return keyword;
+ return identifierOrType();
+
+ case DEMOTE:
+ if (parseContext.extensionTurnedOn(E_GL_EXT_demote_to_helper_invocation))
+ return keyword;
+ else
+ return identifierOrType();
+#endif
+
default:
parseContext.infoSink.info.message(EPrefixInternalError, "Unknown glslang keyword", loc);
return 0;
@@ -1638,7 +1640,7 @@ int TScanContext::identifierOrType()
if (const TVariable* variable = parserToken->sType.lex.symbol->getAsVariable()) {
if (variable->isUserType() &&
// treat redeclaration of forward-declared buffer/uniform reference as an identifier
- !(variable->getType().getBasicType() == EbtReference && afterBuffer)) {
+ !(variable->getType().isReference() && afterBuffer)) {
afterType = true;
return TYPE_NAME;
@@ -1668,7 +1670,7 @@ int TScanContext::identifierOrReserved(bool reserved)
return 0;
}
- if (parseContext.forwardCompatible)
+ if (parseContext.isForwardCompatible())
parseContext.warn(loc, "using future reserved keyword", tokenText, "");
return identifierOrType();
@@ -1681,13 +1683,13 @@ int TScanContext::es30ReservedFromGLSL(int version)
if (parseContext.symbolTable.atBuiltInLevel())
return keyword;
- if ((parseContext.profile == EEsProfile && parseContext.version < 300) ||
- (parseContext.profile != EEsProfile && parseContext.version < version)) {
- if (parseContext.forwardCompatible)
+ if ((parseContext.isEsProfile() && parseContext.version < 300) ||
+ (!parseContext.isEsProfile() && parseContext.version < version)) {
+ if (parseContext.isForwardCompatible())
parseContext.warn(loc, "future reserved word in ES 300 and keyword in GLSL", tokenText, "");
return identifierOrType();
- } else if (parseContext.profile == EEsProfile && parseContext.version >= 300)
+ } else if (parseContext.isEsProfile() && parseContext.version >= 300)
reservedWord();
return keyword;
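
es30ReservedFromGLSL(version) encodes a recurring rule: a token that became a
keyword in desktop GLSL at 'version' and is a reserved word from ES 300 on.
Reading the updated body above for samplerBuffer (routed here with
version = 130 earlier in this diff):

    built-in symbol level   -> keyword
    desktop GLSL 130+       -> keyword
    desktop GLSL < 130      -> identifier (warned when forward-compatible)
    ES < 300                -> identifier (same warning rule)
    ES 300+ (no extension)  -> reserved-word error
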
@@ -1697,9 +1699,9 @@ int TScanContext::es30ReservedFromGLSL(int version)
// showed up, both in an es version and a non-ES version.
int TScanContext::nonreservedKeyword(int esVersion, int nonEsVersion)
{
- if ((parseContext.profile == EEsProfile && parseContext.version < esVersion) ||
- (parseContext.profile != EEsProfile && parseContext.version < nonEsVersion)) {
- if (parseContext.forwardCompatible)
+ if ((parseContext.isEsProfile() && parseContext.version < esVersion) ||
+ (!parseContext.isEsProfile() && parseContext.version < nonEsVersion)) {
+ if (parseContext.isForwardCompatible())
parseContext.warn(loc, "using future keyword", tokenText, "");
return identifierOrType();
@@ -1710,10 +1712,10 @@ int TScanContext::nonreservedKeyword(int esVersion, int nonEsVersion)
int TScanContext::precisionKeyword()
{
- if (parseContext.profile == EEsProfile || parseContext.version >= 130)
+ if (parseContext.isEsProfile() || parseContext.version >= 130)
return keyword;
- if (parseContext.forwardCompatible)
+ if (parseContext.isForwardCompatible())
parseContext.warn(loc, "using ES precision qualifier keyword", tokenText, "");
return identifierOrType();
@@ -1726,7 +1728,7 @@ int TScanContext::matNxM()
if (parseContext.version > 110)
return keyword;
- if (parseContext.forwardCompatible)
+ if (parseContext.isForwardCompatible())
parseContext.warn(loc, "using future non-square matrix type keyword", tokenText, "");
return identifierOrType();
@@ -1736,16 +1738,18 @@ int TScanContext::dMat()
{
afterType = true;
- if (parseContext.profile == EEsProfile && parseContext.version >= 300) {
+ if (parseContext.isEsProfile() && parseContext.version >= 300) {
reservedWord();
return keyword;
}
- if (parseContext.profile != EEsProfile && parseContext.version >= 400)
+ if (!parseContext.isEsProfile() && (parseContext.version >= 400 ||
+ parseContext.symbolTable.atBuiltInLevel() ||
+ (parseContext.version >= 150 && parseContext.extensionTurnedOn(E_GL_ARB_gpu_shader_fp64))))
return keyword;
- if (parseContext.forwardCompatible)
+ if (parseContext.isForwardCompatible())
parseContext.warn(loc, "using future type keyword", tokenText, "");
return identifierOrType();
@@ -1754,19 +1758,19 @@ int TScanContext::dMat()
int TScanContext::firstGenerationImage(bool inEs310)
{
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.profile != EEsProfile && (parseContext.version >= 420 ||
+ (!parseContext.isEsProfile() && (parseContext.version >= 420 ||
parseContext.extensionTurnedOn(E_GL_ARB_shader_image_load_store))) ||
- (inEs310 && parseContext.profile == EEsProfile && parseContext.version >= 310))
+ (inEs310 && parseContext.isEsProfile() && parseContext.version >= 310))
return keyword;
- if ((parseContext.profile == EEsProfile && parseContext.version >= 300) ||
- (parseContext.profile != EEsProfile && parseContext.version >= 130)) {
+ if ((parseContext.isEsProfile() && parseContext.version >= 300) ||
+ (!parseContext.isEsProfile() && parseContext.version >= 130)) {
reservedWord();
return keyword;
}
- if (parseContext.forwardCompatible)
+ if (parseContext.isForwardCompatible())
parseContext.warn(loc, "using future type keyword", tokenText, "");
return identifierOrType();
@@ -1774,17 +1778,17 @@ int TScanContext::firstGenerationImage(bool inEs310)
int TScanContext::secondGenerationImage()
{
- if (parseContext.profile == EEsProfile && parseContext.version >= 310) {
+ if (parseContext.isEsProfile() && parseContext.version >= 310) {
reservedWord();
return keyword;
}
if (parseContext.symbolTable.atBuiltInLevel() ||
- (parseContext.profile != EEsProfile &&
+ (!parseContext.isEsProfile() &&
(parseContext.version >= 420 || parseContext.extensionTurnedOn(E_GL_ARB_shader_image_load_store))))
return keyword;
- if (parseContext.forwardCompatible)
+ if (parseContext.isForwardCompatible())
parseContext.warn(loc, "using future type keyword", tokenText, "");
return identifierOrType();
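
Nearly every hunk in this file swaps direct state tests
(parseContext.profile == EEsProfile, parseContext.forwardCompatible) for the
query helpers isEsProfile() and isForwardCompatible(). A minimal sketch of
such accessors, assuming they simply wrap the same fields on TParseVersions
(the class definition itself is not part of this diff):

    class TParseVersions {
    public:
        bool isEsProfile() const         { return profile == EEsProfile; }
        bool isForwardCompatible() const { return forwardCompatible; }
    protected:
        EProfile profile;
        bool forwardCompatible;
    };

Routing the checks through one helper also gives size-reduced builds a single
place to constant-fold the answer, which fits the GLSLANG_WEB gating seen
elsewhere in this patch.
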
diff --git a/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp b/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp
index 6f9db0195c..44ce1c19d1 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/ShaderLang.cpp
@@ -288,6 +288,11 @@ void InitializeStageSymbolTable(TBuiltInParseables& builtInParseables, int versi
EShLanguage language, EShSource source, TInfoSink& infoSink, TSymbolTable** commonTable,
TSymbolTable** symbolTables)
{
+#ifdef GLSLANG_WEB
+ profile = EEsProfile;
+ version = 310;
+#endif
+
(*symbolTables[language]).adoptLevels(*commonTable[CommonIndex(profile, language)]);
InitializeSymbolTable(builtInParseables.getStageString(language), version, profile, spvVersion, language, source,
infoSink, *symbolTables[language]);
@@ -304,6 +309,11 @@ void InitializeStageSymbolTable(TBuiltInParseables& builtInParseables, int versi
//
bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TSymbolTable** symbolTables, int version, EProfile profile, const SpvVersion& spvVersion, EShSource source)
{
+#ifdef GLSLANG_WEB
+ profile = EEsProfile;
+ version = 310;
+#endif
+
std::unique_ptr<TBuiltInParseables> builtInParseables(CreateBuiltInParseables(infoSink, source));
if (builtInParseables == nullptr)
@@ -326,6 +336,7 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS
InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangFragment, source,
infoSink, commonTable, symbolTables);
+#ifndef GLSLANG_WEB
// check for tessellation
if ((profile != EEsProfile && version >= 150) ||
(profile == EEsProfile && version >= 310)) {
@@ -340,6 +351,7 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS
(profile == EEsProfile && version >= 310))
InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangGeometry, source,
infoSink, commonTable, symbolTables);
+#endif
// check for compute
if ((profile != EEsProfile && version >= 420) ||
@@ -347,7 +359,6 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS
InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangCompute, source,
infoSink, commonTable, symbolTables);
-#ifdef NV_EXTENSIONS
// check for ray tracing stages
if (profile != EEsProfile && version >= 450) {
InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangRayGenNV, source,
@@ -375,9 +386,6 @@ bool InitializeSymbolTables(TInfoSink& infoSink, TSymbolTable** commonTable, TS
(profile == EEsProfile && version >= 320))
InitializeStageSymbolTable(*builtInParseables, version, profile, spvVersion, EShLangTaskNV, source,
infoSink, commonTable, symbolTables);
-#endif
-
-
return true;
}
@@ -479,11 +487,13 @@ void SetupBuiltinSymbolTable(int version, EProfile profile, const SpvVersion& sp
// Function to Print all builtins
void DumpBuiltinSymbolTable(TInfoSink& infoSink, const TSymbolTable& symbolTable)
{
+#ifndef GLSLANG_WEB
infoSink.debug << "BuiltinSymbolTable {\n";
symbolTable.dump(infoSink, true);
infoSink.debug << "}\n";
+#endif
}
// Return true if the shader was correctly specified for version/profile/stage.
@@ -581,6 +591,7 @@ bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNo
break;
}
+#ifndef GLSLANG_WEB
// Correct for stage type...
switch (stage) {
case EShLangGeometry:
@@ -612,7 +623,6 @@ bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNo
version = profile == EEsProfile ? 310 : 420;
}
break;
-#ifdef NV_EXTENSIONS
case EShLangRayGenNV:
case EShLangIntersectNV:
case EShLangAnyHitNV:
@@ -633,7 +643,6 @@ bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNo
infoSink.info.message(EPrefixError, "#version: mesh/task shaders require es profile with version 320 or above, or non-es profile with version 450 or above");
version = profile == EEsProfile ? 320 : 450;
}
-#endif
default:
break;
}
@@ -646,15 +655,10 @@ bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNo
// Check for SPIR-V compatibility
if (spvVersion.spv != 0) {
switch (profile) {
- case EEsProfile:
- if (spvVersion.vulkan > 0 && version < 310) {
+ case EEsProfile:
+ if (version < 310) {
correct = false;
- infoSink.info.message(EPrefixError, "#version: ES shaders for Vulkan SPIR-V require version 310 or higher");
- version = 310;
- }
- if (spvVersion.openGl >= 100) {
- correct = false;
- infoSink.info.message(EPrefixError, "#version: ES shaders for OpenGL SPIR-V are not supported");
+ infoSink.info.message(EPrefixError, "#version: ES shaders for SPIR-V require version 310 or higher");
version = 310;
}
break;
@@ -675,6 +679,7 @@ bool DeduceVersionProfile(TInfoSink& infoSink, EShLanguage stage, bool versionNo
break;
}
}
+#endif
return correct;
}
@@ -833,13 +838,17 @@ bool ProcessDeferred(
// Get all the stages, languages, clients, and other environment
// stuff sorted out.
- EShSource source = (messages & EShMsgReadHlsl) != 0 ? EShSourceHlsl : EShSourceGlsl;
+ EShSource sourceGuess = (messages & EShMsgReadHlsl) != 0 ? EShSourceHlsl : EShSourceGlsl;
SpvVersion spvVersion;
EShLanguage stage = compiler->getLanguage();
- TranslateEnvironment(environment, messages, source, stage, spvVersion);
+ TranslateEnvironment(environment, messages, sourceGuess, stage, spvVersion);
+#ifdef ENABLE_HLSL
+ EShSource source = sourceGuess;
if (environment != nullptr && environment->target.hlslFunctionality1)
intermediate.setHlslFunctionality1();
-
+#else
+ const EShSource source = EShSourceGlsl;
+#endif
// First, without using the preprocessor or parser, find the #version, so we know what
// symbol tables, processing rules, etc. to set up. This does not need the extra strings
// outlined above, just the user shader, after the system and user preambles.
@@ -852,6 +861,7 @@ bool ProcessDeferred(
: userInput.scanVersion(version, profile, versionNotFirstToken);
bool versionNotFound = version == 0;
if (forceDefaultVersionAndProfile && source == EShSourceGlsl) {
+#ifndef GLSLANG_WEB
if (! (messages & EShMsgSuppressWarnings) && ! versionNotFound &&
(version != defaultVersion || profile != defaultProfile)) {
compiler->infoSink.info << "Warning, (version, profile) forced to be ("
@@ -859,7 +869,7 @@ bool ProcessDeferred(
<< "), while in source code it is ("
<< version << ", " << ProfileName(profile) << ")\n";
}
-
+#endif
if (versionNotFound) {
versionNotFirstToken = false;
versionNotFirst = false;
@@ -871,7 +881,13 @@ bool ProcessDeferred(
bool goodVersion = DeduceVersionProfile(compiler->infoSink, stage,
versionNotFirst, defaultVersion, source, version, profile, spvVersion);
+#ifdef GLSLANG_WEB
+ profile = EEsProfile;
+ version = 310;
+#endif
+
bool versionWillBeError = (versionNotFound || (profile == EEsProfile && version >= 300 && versionNotFirst));
+#ifndef GLSLANG_WEB
bool warnVersionNotFirst = false;
if (! versionWillBeError && versionNotFirstToken) {
if (messages & EShMsgRelaxedErrors)
@@ -879,6 +895,7 @@ bool ProcessDeferred(
else
versionWillBeError = true;
}
+#endif
intermediate.setSource(source);
intermediate.setVersion(version);
@@ -887,8 +904,10 @@ bool ProcessDeferred(
RecordProcesses(intermediate, messages, sourceEntryPointName);
if (spvVersion.vulkan > 0)
intermediate.setOriginUpperLeft();
+#ifdef ENABLE_HLSL
if ((messages & EShMsgHlslOffsets) || source == EShSourceHlsl)
intermediate.setHlslOffsets();
+#endif
if (messages & EShMsgDebugInfo) {
intermediate.setSourceFile(names[numPre]);
for (int s = 0; s < numStrings; ++s) {
@@ -938,11 +957,13 @@ bool ProcessDeferred(
parseContext->setLimits(*resources);
if (! goodVersion)
parseContext->addError();
+#ifndef GLSLANG_WEB
if (warnVersionNotFirst) {
TSourceLoc loc;
loc.init();
parseContext->warn(loc, "Illegal to have non-comment, non-whitespace tokens before #version", "#version", "");
}
+#endif
parseContext->initializeExtensionBehavior();
@@ -973,6 +994,8 @@ bool ProcessDeferred(
return success;
}
+#ifndef GLSLANG_WEB
+
// Responsible for keeping track of the most recent source string and line in
// the preprocessor and outputting newlines appropriately if the source string
// or line changes.
@@ -1169,6 +1192,8 @@ struct DoPreprocessing {
std::string* outputString;
};
+#endif
+
// DoFullParse is a valid ProcessingConext template argument for fully
// parsing the shader. It populates the "intermediate" with the AST.
struct DoFullParse{
@@ -1199,6 +1224,7 @@ struct DoFullParse{
}
};
+#ifndef GLSLANG_WEB
// Take a single compilation unit, and run the preprocessor on it.
// Return: True if there were no issues found in preprocessing,
// False if during preprocessing any unknown version, pragmas or
@@ -1231,6 +1257,7 @@ bool PreprocessDeferred(
forwardCompatible, messages, intermediate, parser,
false, includer);
}
+#endif
//
// do a partial compile on the given strings for a single compilation unit
@@ -1749,6 +1776,11 @@ void TShader::addProcesses(const std::vector<std::string>& p)
intermediate->addProcesses(p);
}
+void TShader::setInvertY(bool invert) { intermediate->setInvertY(invert); }
+void TShader::setNanMinMaxClamp(bool useNonNan) { intermediate->setNanMinMaxClamp(useNonNan); }
+
+#ifndef GLSLANG_WEB
+
// Set binding base for given resource type
void TShader::setShiftBinding(TResourceType res, unsigned int base) {
intermediate->setShiftBinding(res, base);
@@ -1776,7 +1808,7 @@ void TShader::setShiftSsboBinding(unsigned int base) { setShiftBinding(EResSs
// Enables binding automapping using TIoMapper
void TShader::setAutoMapBindings(bool map) { intermediate->setAutoMapBindings(map); }
// Enables position.Y output negation in vertex shader
-void TShader::setInvertY(bool invert) { intermediate->setInvertY(invert); }
+
// Fragile: currently within one stage: simple auto-assignment of location
void TShader::setAutoMapLocations(bool map) { intermediate->setAutoMapLocations(map); }
void TShader::addUniformLocationOverride(const char* name, int loc)
@@ -1787,12 +1819,16 @@ void TShader::setUniformLocationBase(int base)
{
intermediate->setUniformLocationBase(base);
}
-// See comment above TDefaultHlslIoMapper in iomapper.cpp:
-void TShader::setHlslIoMapping(bool hlslIoMap) { intermediate->setHlslIoMapping(hlslIoMap); }
-void TShader::setFlattenUniformArrays(bool flatten) { intermediate->setFlattenUniformArrays(flatten); }
void TShader::setNoStorageFormat(bool useUnknownFormat) { intermediate->setNoStorageFormat(useUnknownFormat); }
void TShader::setResourceSetBinding(const std::vector<std::string>& base) { intermediate->setResourceSetBinding(base); }
void TShader::setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode) { intermediate->setTextureSamplerTransformMode(mode); }
+#endif
+
+#ifdef ENABLE_HLSL
+// See comment above TDefaultHlslIoMapper in iomapper.cpp:
+void TShader::setHlslIoMapping(bool hlslIoMap) { intermediate->setHlslIoMapping(hlslIoMap); }
+void TShader::setFlattenUniformArrays(bool flatten) { intermediate->setFlattenUniformArrays(flatten); }
+#endif
//
// Turn the shader strings into a parse tree in the TIntermediate.
@@ -1816,6 +1852,7 @@ bool TShader::parse(const TBuiltInResource* builtInResources, int defaultVersion
&environment);
}
+#ifndef GLSLANG_WEB
// Fill in a string with the result of preprocessing ShaderStrings
// Returns true if all extensions, pragmas and version strings were valid.
//
@@ -1840,6 +1877,7 @@ bool TShader::preprocess(const TBuiltInResource* builtInResources,
defaultProfile, forceDefaultVersionAndProfile,
forwardCompatible, message, includer, *intermediate, output_string);
}
+#endif
const char* TShader::getInfoLog()
{
@@ -1851,7 +1889,11 @@ const char* TShader::getInfoDebugLog()
return infoSink->debug.c_str();
}
-TProgram::TProgram() : reflection(0), ioMapper(nullptr), linked(false)
+TProgram::TProgram() :
+#ifndef GLSLANG_WEB
+ reflection(0),
+#endif
+ linked(false)
{
pool = new TPoolAllocator;
infoSink = new TInfoSink;
@@ -1863,9 +1905,10 @@ TProgram::TProgram() : reflection(0), ioMapper(nullptr), linked(false)
TProgram::~TProgram()
{
- delete ioMapper;
delete infoSink;
+#ifndef GLSLANG_WEB
delete reflection;
+#endif
for (int s = 0; s < EShLangCount; ++s)
if (newedIntermediate[s])
@@ -1910,6 +1953,7 @@ bool TProgram::linkStage(EShLanguage stage, EShMessages messages)
if (stages[stage].size() == 0)
return true;
+#ifndef GLSLANG_WEB
int numEsShaders = 0, numNonEsShaders = 0;
for (auto it = stages[stage].begin(); it != stages[stage].end(); ++it) {
if ((*it)->intermediate->getProfile() == EEsProfile) {
@@ -1958,7 +2002,9 @@ bool TProgram::linkStage(EShLanguage stage, EShMessages messages)
for (it = stages[stage].begin(); it != stages[stage].end(); ++it)
intermediate[stage]->merge(*infoSink, *(*it)->intermediate);
}
-
+#else
+ intermediate[stage] = stages[stage].front()->intermediate;
+#endif
intermediate[stage]->finalCheck(*infoSink, (messages & EShMsgKeepUncalled) != 0);
if (messages & EShMsgAST)
@@ -1977,13 +2023,15 @@ const char* TProgram::getInfoDebugLog()
return infoSink->debug.c_str();
}
+#ifndef GLSLANG_WEB
+
//
// Reflection implementation.
//
bool TProgram::buildReflection(int opts)
{
- if (! linked || reflection)
+ if (! linked || reflection != nullptr)
return false;
int firstStage = EShLangVertex, lastStage = EShLangFragment;
@@ -2013,8 +2061,10 @@ bool TProgram::buildReflection(int opts)
return true;
}
-unsigned TProgram::getLocalSize(int dim) const { return reflection->getLocalSize(dim); }
-int TProgram::getReflectionIndex(const char* name) const { return reflection->getIndex(name); }
+unsigned TProgram::getLocalSize(int dim) const { return reflection->getLocalSize(dim); }
+int TProgram::getReflectionIndex(const char* name) const { return reflection->getIndex(name); }
+int TProgram::getReflectionPipeIOIndex(const char* name, const bool inOrOut) const
+ { return reflection->getPipeIOIndex(name, inOrOut); }
int TProgram::getNumUniformVariables() const { return reflection->getNumUniforms(); }
const TObjectReflection& TProgram::getUniform(int index) const { return reflection->getUniform(index); }
@@ -2030,27 +2080,31 @@ int TProgram::getNumBufferBlocks() const { return r
const TObjectReflection& TProgram::getBufferBlock(int index) const { return reflection->getStorageBufferBlock(index); }
int TProgram::getNumAtomicCounters() const { return reflection->getNumAtomicCounters(); }
const TObjectReflection& TProgram::getAtomicCounter(int index) const { return reflection->getAtomicCounter(index); }
-
-void TProgram::dumpReflection() { reflection->dump(); }
+void TProgram::dumpReflection() { if (reflection != nullptr) reflection->dump(); }
//
// I/O mapping implementation.
//
-bool TProgram::mapIO(TIoMapResolver* resolver)
+bool TProgram::mapIO(TIoMapResolver* pResolver, TIoMapper* pIoMapper)
{
- if (! linked || ioMapper)
+ if (! linked)
return false;
-
- ioMapper = new TIoMapper;
-
+ TIoMapper* ioMapper = nullptr;
+ TIoMapper defaultIOMapper;
+ if (pIoMapper == nullptr)
+ ioMapper = &defaultIOMapper;
+ else
+ ioMapper = pIoMapper;
for (int s = 0; s < EShLangCount; ++s) {
if (intermediate[s]) {
- if (! ioMapper->addStage((EShLanguage)s, *intermediate[s], *infoSink, resolver))
+ if (! ioMapper->addStage((EShLanguage)s, *intermediate[s], *infoSink, pResolver))
return false;
}
}
- return true;
+ return ioMapper->doMap(pResolver, *infoSink);
}
+#endif // GLSLANG_WEB
+
} // end namespace glslang
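
The mapIO() rewrite above removes the TProgram-owned ioMapper member: callers
may now inject both the resolver and the I/O mapper, a stack-local TIoMapper
serves as the default when none is given, and the result of doMap() is
propagated instead of returning true unconditionally. A usage sketch,
assuming the header declares both parameters with nullptr defaults (the
declaration is not shown in this diff):

    glslang::TProgram program;
    // ... addShader() and link() as usual ...

    bool ok = program.mapIO();              // default TIoMapper does the work

    glslang::TIoMapper customMapper;        // or inject a caller-owned mapper
    bool ok2 = program.mapIO(/*pResolver=*/nullptr, &customMapper);
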
diff --git a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp
index c0a02e68a7..44682379f7 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.cpp
@@ -61,63 +61,66 @@ void TType::buildMangledName(TString& mangledName) const
switch (basicType) {
case EbtFloat: mangledName += 'f'; break;
- case EbtDouble: mangledName += 'd'; break;
- case EbtFloat16: mangledName += "f16"; break;
case EbtInt: mangledName += 'i'; break;
case EbtUint: mangledName += 'u'; break;
+ case EbtBool: mangledName += 'b'; break;
+#ifndef GLSLANG_WEB
+ case EbtDouble: mangledName += 'd'; break;
+ case EbtFloat16: mangledName += "f16"; break;
case EbtInt8: mangledName += "i8"; break;
case EbtUint8: mangledName += "u8"; break;
case EbtInt16: mangledName += "i16"; break;
case EbtUint16: mangledName += "u16"; break;
case EbtInt64: mangledName += "i64"; break;
case EbtUint64: mangledName += "u64"; break;
- case EbtBool: mangledName += 'b'; break;
case EbtAtomicUint: mangledName += "au"; break;
-#ifdef NV_EXTENSIONS
case EbtAccStructNV: mangledName += "asnv"; break;
#endif
case EbtSampler:
switch (sampler.type) {
-#ifdef AMD_EXTENSIONS
+#ifndef GLSLANG_WEB
case EbtFloat16: mangledName += "f16"; break;
#endif
case EbtInt: mangledName += "i"; break;
case EbtUint: mangledName += "u"; break;
default: break; // some compilers want this
}
- if (sampler.image)
- mangledName += "I"; // a normal image
- else if (sampler.sampler)
+ if (sampler.isImageClass())
+ mangledName += "I"; // a normal image or subpass
+ else if (sampler.isPureSampler())
mangledName += "p"; // a "pure" sampler
- else if (!sampler.combined)
+ else if (!sampler.isCombined())
mangledName += "t"; // a "pure" texture
else
mangledName += "s"; // traditional combined sampler
- if (sampler.arrayed)
+ if (sampler.isArrayed())
mangledName += "A";
- if (sampler.shadow)
+ if (sampler.isShadow())
mangledName += "S";
- if (sampler.external)
+ if (sampler.isExternal())
mangledName += "E";
- if (sampler.yuv)
+ if (sampler.isYuv())
mangledName += "Y";
switch (sampler.dim) {
- case Esd1D: mangledName += "1"; break;
case Esd2D: mangledName += "2"; break;
case Esd3D: mangledName += "3"; break;
case EsdCube: mangledName += "C"; break;
+#ifndef GLSLANG_WEB
+ case Esd1D: mangledName += "1"; break;
case EsdRect: mangledName += "R2"; break;
case EsdBuffer: mangledName += "B"; break;
case EsdSubpass: mangledName += "P"; break;
+#endif
default: break; // some compilers want this
}
+#ifdef ENABLE_HLSL
if (sampler.hasReturnStruct()) {
// Name mangle for sampler return struct uses struct table index.
mangledName += "-tx-struct";
char text[16]; // plenty enough space for the small integers.
- snprintf(text, sizeof(text), "%d-", sampler.structReturnIndex);
+ snprintf(text, sizeof(text), "%d-", sampler.getStructReturnIndex());
mangledName += text;
} else {
switch (sampler.getVectorSize()) {
@@ -127,8 +130,9 @@ void TType::buildMangledName(TString& mangledName) const
case 4: break; // default to prior name mangle behavior
}
}
+#endif
- if (sampler.ms)
+ if (sampler.isMultiSample())
mangledName += "M";
break;
case EbtStruct:
@@ -172,6 +176,8 @@ void TType::buildMangledName(TString& mangledName) const
}
}
+#ifndef GLSLANG_WEB
+
//
// Dump functions.
//
@@ -184,7 +190,7 @@ void TSymbol::dumpExtensions(TInfoSink& infoSink) const
for (int i = 0; i < numExtensions; i++)
infoSink.debug << getExtensions()[i] << ",";
-
+
infoSink.debug << ">";
}
}
@@ -229,7 +235,7 @@ void TFunction::dump(TInfoSink& infoSink, bool complete) const
infoSink.debug << "\n";
}
-void TAnonMember::dump(TInfoSink& TInfoSink, bool complete) const
+void TAnonMember::dump(TInfoSink& TInfoSink, bool) const
{
TInfoSink.debug << "anonymous member " << getMemberNumber() << " of " << getAnonContainer().getName().c_str()
<< "\n";
@@ -250,6 +256,8 @@ void TSymbolTable::dump(TInfoSink& infoSink, bool complete) const
}
}
+#endif
+
//
// Functions have buried pointers to delete.
//
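
The buildMangledName() changes replace raw sampler field reads
(sampler.image, sampler.shadow, ...) with the predicate calls introduced by
this update (isImageClass(), isShadow(), ...); the emitted characters are
unchanged. As a worked example against the visible switch: a
sampler2DArrayShadow (combined, arrayed, shadow, dim Esd2D) contributes
's' + 'A' + 'S' + '2', i.e. the fragment "sAS2", to the mangled signature
used for function overload resolution.
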
diff --git a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h
index f3873cff02..40ca3da532 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/SymbolTable.h
@@ -116,8 +116,11 @@ public:
}
virtual int getNumExtensions() const { return extensions == nullptr ? 0 : (int)extensions->size(); }
virtual const char** getExtensions() const { return extensions->data(); }
+
+#ifndef GLSLANG_WEB
virtual void dump(TInfoSink& infoSink, bool complete = false) const = 0;
void dumpExtensions(TInfoSink& infoSink) const;
+#endif
virtual bool isReadOnly() const { return ! writable; }
virtual void makeReadOnly() { writable = false; }
@@ -193,7 +196,9 @@ public:
}
virtual const char** getMemberExtensions(int member) const { return (*memberExtensions)[member].data(); }
+#ifndef GLSLANG_WEB
virtual void dump(TInfoSink& infoSink, bool complete = false) const;
+#endif
protected:
explicit TVariable(const TVariable&);
@@ -314,7 +319,9 @@ public:
virtual TParameter& operator[](int i) { assert(writable); return parameters[i]; }
virtual const TParameter& operator[](int i) const { return parameters[i]; }
+#ifndef GLSLANG_WEB
virtual void dump(TInfoSink& infoSink, bool complete = false) const override;
+#endif
protected:
explicit TFunction(const TFunction&);
@@ -374,7 +381,9 @@ public:
virtual const char** getExtensions() const override { return anonContainer.getMemberExtensions(memberNumber); }
virtual int getAnonId() const { return anonId; }
+#ifndef GLSLANG_WEB
virtual void dump(TInfoSink& infoSink, bool complete = false) const override;
+#endif
protected:
explicit TAnonMember(const TAnonMember&);
@@ -542,7 +551,9 @@ public:
void relateToOperator(const char* name, TOperator op);
void setFunctionExtensions(const char* name, int num, const char* const extensions[]);
+#ifndef GLSLANG_WEB
void dump(TInfoSink& infoSink, bool complete = false) const;
+#endif
TSymbolTableLevel* clone() const;
void readOnly();
@@ -843,7 +854,9 @@ public:
}
int getMaxSymbolId() { return uniqueId; }
+#ifndef GLSLANG_WEB
void dump(TInfoSink& infoSink, bool complete = false) const;
+#endif
void copyTable(const TSymbolTable& copyOf);
void setPreviousDefaultPrecisions(TPrecisionQualifier *p) { table[currentLevel()]->setPreviousDefaultPrecisions(p); }
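
Each dump() declaration in this header gets the same treatment: debug-only
entry points are fenced behind #ifndef GLSLANG_WEB so a size-constrained web
build compiles them out, matching the gating of their definitions in
SymbolTable.cpp above. The pattern in miniature (a sketch, not a verbatim
excerpt):

    class TSymbol {
    public:
        virtual const TString& getName() const = 0;  // always available
    #ifndef GLSLANG_WEB
        // Debug facilities, dropped from minimal web builds.
        virtual void dump(TInfoSink& infoSink, bool complete = false) const = 0;
        void dumpExtensions(TInfoSink& infoSink) const;
    #endif
    };
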
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp b/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp
index f19c38502d..e549074df8 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/Versions.cpp
@@ -145,6 +145,8 @@
namespace glslang {
+#ifndef GLSLANG_WEB
+
//
// Initialize all extensions, almost always to 'disable', as once their features
// are incorporated into a core version, their features are supported through allowing that
@@ -170,8 +172,10 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_ARB_tessellation_shader] = EBhDisable;
extensionBehavior[E_GL_ARB_enhanced_layouts] = EBhDisable;
extensionBehavior[E_GL_ARB_texture_cube_map_array] = EBhDisable;
+ extensionBehavior[E_GL_ARB_texture_multisample] = EBhDisable;
extensionBehavior[E_GL_ARB_shader_texture_lod] = EBhDisable;
extensionBehavior[E_GL_ARB_explicit_attrib_location] = EBhDisable;
+ extensionBehavior[E_GL_ARB_explicit_uniform_location] = EBhDisable;
extensionBehavior[E_GL_ARB_shader_image_load_store] = EBhDisable;
extensionBehavior[E_GL_ARB_shader_atomic_counters] = EBhDisable;
extensionBehavior[E_GL_ARB_shader_draw_parameters] = EBhDisable;
@@ -180,6 +184,7 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_ARB_shader_texture_image_samples] = EBhDisable;
extensionBehavior[E_GL_ARB_viewport_array] = EBhDisable;
extensionBehavior[E_GL_ARB_gpu_shader_int64] = EBhDisable;
+ extensionBehavior[E_GL_ARB_gpu_shader_fp64] = EBhDisable;
extensionBehavior[E_GL_ARB_shader_ballot] = EBhDisable;
extensionBehavior[E_GL_ARB_sparse_texture2] = EBhDisable;
extensionBehavior[E_GL_ARB_sparse_texture_clamp] = EBhDisable;
@@ -187,6 +192,10 @@ void TParseVersions::initializeExtensionBehavior()
// extensionBehavior[E_GL_ARB_cull_distance] = EBhDisable; // present for 4.5, but need extension control over block members
extensionBehavior[E_GL_ARB_post_depth_coverage] = EBhDisable;
extensionBehavior[E_GL_ARB_shader_viewport_layer_array] = EBhDisable;
+ extensionBehavior[E_GL_ARB_fragment_shader_interlock] = EBhDisable;
+ extensionBehavior[E_GL_ARB_shader_clock] = EBhDisable;
+ extensionBehavior[E_GL_ARB_uniform_buffer_object] = EBhDisable;
+ extensionBehavior[E_GL_ARB_sample_shading] = EBhDisable;
extensionBehavior[E_GL_KHR_shader_subgroup_basic] = EBhDisable;
extensionBehavior[E_GL_KHR_shader_subgroup_vote] = EBhDisable;
@@ -210,6 +219,8 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_EXT_fragment_invocation_density] = EBhDisable;
extensionBehavior[E_GL_EXT_buffer_reference] = EBhDisable;
extensionBehavior[E_GL_EXT_buffer_reference2] = EBhDisable;
+ extensionBehavior[E_GL_EXT_buffer_reference_uvec2] = EBhDisable;
+ extensionBehavior[E_GL_EXT_demote_to_helper_invocation] = EBhDisable;
extensionBehavior[E_GL_EXT_shader_16bit_storage] = EBhDisable;
extensionBehavior[E_GL_EXT_shader_8bit_storage] = EBhDisable;
@@ -218,7 +229,6 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_GOOGLE_cpp_style_line_directive] = EBhDisable;
extensionBehavior[E_GL_GOOGLE_include_directive] = EBhDisable;
-#ifdef AMD_EXTENSIONS
extensionBehavior[E_GL_AMD_shader_ballot] = EBhDisable;
extensionBehavior[E_GL_AMD_shader_trinary_minmax] = EBhDisable;
extensionBehavior[E_GL_AMD_shader_explicit_vertex_parameter] = EBhDisable;
@@ -229,9 +239,9 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_AMD_shader_image_load_store_lod] = EBhDisable;
extensionBehavior[E_GL_AMD_shader_fragment_mask] = EBhDisable;
extensionBehavior[E_GL_AMD_gpu_shader_half_float_fetch] = EBhDisable;
-#endif
-#ifdef NV_EXTENSIONS
+ extensionBehavior[E_GL_INTEL_shader_integer_functions2] = EBhDisable;
+
extensionBehavior[E_GL_NV_sample_mask_override_coverage] = EBhDisable;
extensionBehavior[E_SPV_NV_geometry_shader_passthrough] = EBhDisable;
extensionBehavior[E_GL_NV_viewport_array2] = EBhDisable;
@@ -247,9 +257,10 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_NV_compute_shader_derivatives] = EBhDisable;
extensionBehavior[E_GL_NV_shader_texture_footprint] = EBhDisable;
extensionBehavior[E_GL_NV_mesh_shader] = EBhDisable;
-#endif
extensionBehavior[E_GL_NV_cooperative_matrix] = EBhDisable;
+ extensionBehavior[E_GL_NV_shader_sm_builtins] = EBhDisable;
+ extensionBehavior[E_GL_NV_integer_cooperative_matrix] = EBhDisable;
// AEP
extensionBehavior[E_GL_ANDROID_extension_pack_es31a] = EBhDisable;
@@ -282,6 +293,7 @@ void TParseVersions::initializeExtensionBehavior()
// EXT extensions
extensionBehavior[E_GL_EXT_device_group] = EBhDisable;
extensionBehavior[E_GL_EXT_multiview] = EBhDisable;
+ extensionBehavior[E_GL_EXT_shader_realtime_clock] = EBhDisable;
// OVR extensions
extensionBehavior[E_GL_OVR_multiview] = EBhDisable;
@@ -296,16 +308,26 @@ void TParseVersions::initializeExtensionBehavior()
extensionBehavior[E_GL_EXT_shader_explicit_arithmetic_types_float16] = EBhDisable;
extensionBehavior[E_GL_EXT_shader_explicit_arithmetic_types_float32] = EBhDisable;
extensionBehavior[E_GL_EXT_shader_explicit_arithmetic_types_float64] = EBhDisable;
+
+ // subgroup extended types
+ extensionBehavior[E_GL_EXT_shader_subgroup_extended_types_int8] = EBhDisable;
+ extensionBehavior[E_GL_EXT_shader_subgroup_extended_types_int16] = EBhDisable;
+ extensionBehavior[E_GL_EXT_shader_subgroup_extended_types_int64] = EBhDisable;
+ extensionBehavior[E_GL_EXT_shader_subgroup_extended_types_float16] = EBhDisable;
}
+#endif // GLSLANG_WEB
// Get code that is not part of a shared symbol table, is specific to this shader,
// or needed by the preprocessor (which does not use a shared symbol table).
void TParseVersions::getPreamble(std::string& preamble)
{
- if (profile == EEsProfile) {
+ if (isEsProfile()) {
preamble =
"#define GL_ES 1\n"
"#define GL_FRAGMENT_PRECISION_HIGH 1\n"
+#ifdef GLSLANG_WEB
+ ;
+#else
"#define GL_OES_texture_3D 1\n"
"#define GL_OES_standard_derivatives 1\n"
"#define GL_EXT_frag_depth 1\n"
@@ -317,7 +339,6 @@ void TParseVersions::getPreamble(std::string& preamble)
// AEP
"#define GL_ANDROID_extension_pack_es31a 1\n"
- "#define GL_KHR_blend_equation_advanced 1\n"
"#define GL_OES_sample_variables 1\n"
"#define GL_OES_shader_image_atomic 1\n"
"#define GL_OES_shader_multisample_interpolation 1\n"
@@ -345,11 +366,9 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_EXT_shader_non_constant_global_initializers 1\n"
;
-#ifdef NV_EXTENSIONS
- if (profile == EEsProfile && version >= 300) {
+ if (isEsProfile() && version >= 300) {
preamble += "#define GL_NV_shader_noperspective_interpolation 1\n";
}
-#endif
} else {
preamble =
@@ -363,8 +382,10 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_ARB_tessellation_shader 1\n"
"#define GL_ARB_enhanced_layouts 1\n"
"#define GL_ARB_texture_cube_map_array 1\n"
+ "#define GL_ARB_texture_multisample 1\n"
"#define GL_ARB_shader_texture_lod 1\n"
"#define GL_ARB_explicit_attrib_location 1\n"
+ "#define GL_ARB_explicit_uniform_location 1\n"
"#define GL_ARB_shader_image_load_store 1\n"
"#define GL_ARB_shader_atomic_counters 1\n"
"#define GL_ARB_shader_draw_parameters 1\n"
@@ -373,12 +394,16 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_ARB_shader_texture_image_samples 1\n"
"#define GL_ARB_viewport_array 1\n"
"#define GL_ARB_gpu_shader_int64 1\n"
+ "#define GL_ARB_gpu_shader_fp64 1\n"
"#define GL_ARB_shader_ballot 1\n"
"#define GL_ARB_sparse_texture2 1\n"
"#define GL_ARB_sparse_texture_clamp 1\n"
"#define GL_ARB_shader_stencil_export 1\n"
+ "#define GL_ARB_sample_shading 1\n"
// "#define GL_ARB_cull_distance 1\n" // present for 4.5, but need extension control over block members
"#define GL_ARB_post_depth_coverage 1\n"
+ "#define GL_ARB_fragment_shader_interlock 1\n"
+ "#define GL_ARB_uniform_buffer_object 1\n"
"#define GL_EXT_shader_non_constant_global_initializers 1\n"
"#define GL_EXT_shader_image_load_formatted 1\n"
"#define GL_EXT_post_depth_coverage 1\n"
@@ -391,6 +416,8 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_EXT_fragment_invocation_density 1\n"
"#define GL_EXT_buffer_reference 1\n"
"#define GL_EXT_buffer_reference2 1\n"
+ "#define GL_EXT_buffer_reference_uvec2 1\n"
+ "#define GL_EXT_demote_to_helper_invocation 1\n"
// GL_KHR_shader_subgroup
"#define GL_KHR_shader_subgroup_basic 1\n"
@@ -403,8 +430,8 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_KHR_shader_subgroup_quad 1\n"
"#define E_GL_EXT_shader_atomic_int64 1\n"
+ "#define E_GL_EXT_shader_realtime_clock 1\n"
-#ifdef AMD_EXTENSIONS
"#define GL_AMD_shader_ballot 1\n"
"#define GL_AMD_shader_trinary_minmax 1\n"
"#define GL_AMD_shader_explicit_vertex_parameter 1\n"
@@ -415,9 +442,9 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_AMD_shader_image_load_store_lod 1\n"
"#define GL_AMD_shader_fragment_mask 1\n"
"#define GL_AMD_gpu_shader_half_float_fetch 1\n"
-#endif
-#ifdef NV_EXTENSIONS
+ "#define GL_INTEL_shader_integer_functions2 1\n"
+
"#define GL_NV_sample_mask_override_coverage 1\n"
"#define GL_NV_geometry_shader_passthrough 1\n"
"#define GL_NV_viewport_array2 1\n"
@@ -430,8 +457,8 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_NV_compute_shader_derivatives 1\n"
"#define GL_NV_shader_texture_footprint 1\n"
"#define GL_NV_mesh_shader 1\n"
-#endif
"#define GL_NV_cooperative_matrix 1\n"
+ "#define GL_NV_integer_cooperative_matrix 1\n"
"#define GL_EXT_shader_explicit_arithmetic_types 1\n"
"#define GL_EXT_shader_explicit_arithmetic_types_int8 1\n"
@@ -441,6 +468,11 @@ void TParseVersions::getPreamble(std::string& preamble)
"#define GL_EXT_shader_explicit_arithmetic_types_float16 1\n"
"#define GL_EXT_shader_explicit_arithmetic_types_float32 1\n"
"#define GL_EXT_shader_explicit_arithmetic_types_float64 1\n"
+
+ "#define GL_EXT_shader_subgroup_extended_types_int8 1\n"
+ "#define GL_EXT_shader_subgroup_extended_types_int16 1\n"
+ "#define GL_EXT_shader_subgroup_extended_types_int64 1\n"
+ "#define GL_EXT_shader_subgroup_extended_types_float16 1\n"
;
if (version >= 150) {
@@ -450,13 +482,16 @@ void TParseVersions::getPreamble(std::string& preamble)
if (profile == ECompatibilityProfile)
preamble += "#define GL_compatibility_profile 1\n";
}
+#endif // GLSLANG_WEB
}
- if ((profile != EEsProfile && version >= 140) ||
- (profile == EEsProfile && version >= 310)) {
+#ifndef GLSLANG_WEB
+ if ((!isEsProfile() && version >= 140) ||
+ (isEsProfile() && version >= 310)) {
preamble +=
"#define GL_EXT_device_group 1\n"
"#define GL_EXT_multiview 1\n"
+ "#define GL_NV_shader_sm_builtins 1\n"
;
}
@@ -471,7 +506,9 @@ void TParseVersions::getPreamble(std::string& preamble)
preamble +=
"#define GL_GOOGLE_cpp_style_line_directive 1\n"
"#define GL_GOOGLE_include_directive 1\n"
+ "#define GL_KHR_blend_equation_advanced 1\n"
;
+#endif
// #define VULKAN XXXX
const int numberBufSize = 12;
@@ -482,6 +519,8 @@ void TParseVersions::getPreamble(std::string& preamble)
preamble += numberBuf;
preamble += "\n";
}
+
+#ifndef GLSLANG_WEB
// #define GL_SPIRV XXXX
if (spvVersion.openGl > 0) {
preamble += "#define GL_SPIRV ";
@@ -489,22 +528,7 @@ void TParseVersions::getPreamble(std::string& preamble)
preamble += numberBuf;
preamble += "\n";
}
-
-}
-
-//
-// When to use requireProfile():
-//
-// Use if only some profiles support a feature. However, if within a profile the feature
-// is version or extension specific, follow this call with calls to profileRequires().
-//
-// Operation: If the current profile is not one of the profileMask,
-// give an error message.
-//
-void TParseVersions::requireProfile(const TSourceLoc& loc, int profileMask, const char* featureDesc)
-{
- if (! (profile & profileMask))
- error(loc, "not supported with this profile:", featureDesc, ProfileName(profile));
+#endif
}
//
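
getPreamble() seeds the preprocessor with one #define line per advertised
extension, then appends the VULKAN and (desktop-only) GL_SPIRV version macros
when compiling for SPIR-V. Representative output for an ES compile targeting
Vulkan might begin as follows (a sketch; the exact set and the numeric value
depend on profile, version, spvVersion, and build flags):

    #define GL_ES 1
    #define GL_FRAGMENT_PRECISION_HIGH 1
    #define GL_OES_texture_3D 1
    #define VULKAN 100
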
@@ -514,12 +538,12 @@ const char* StageName(EShLanguage stage)
{
switch(stage) {
case EShLangVertex: return "vertex";
+ case EShLangFragment: return "fragment";
+ case EShLangCompute: return "compute";
+#ifndef GLSLANG_WEB
case EShLangTessControl: return "tessellation control";
case EShLangTessEvaluation: return "tessellation evaluation";
case EShLangGeometry: return "geometry";
- case EShLangFragment: return "fragment";
- case EShLangCompute: return "compute";
-#ifdef NV_EXTENSIONS
case EShLangRayGenNV: return "ray-generation";
case EShLangIntersectNV: return "intersection";
case EShLangAnyHitNV: return "any-hit";
@@ -534,6 +558,42 @@ const char* StageName(EShLanguage stage)
}
//
+// When to use requireStage()
+//
+// If only some stages support a feature.
+//
+// Operation: If the current stage is not present, give an error message.
+//
+void TParseVersions::requireStage(const TSourceLoc& loc, EShLanguageMask languageMask, const char* featureDesc)
+{
+ if (((1 << language) & languageMask) == 0)
+ error(loc, "not supported in this stage:", featureDesc, StageName(language));
+}
+
+// If only one stage supports a feature, this can be called. But, all supporting stages
+// must be specified with one call.
+void TParseVersions::requireStage(const TSourceLoc& loc, EShLanguage stage, const char* featureDesc)
+{
+ requireStage(loc, static_cast<EShLanguageMask>(1 << stage), featureDesc);
+}
+
+#ifndef GLSLANG_WEB
+//
+// When to use requireProfile():
+//
+// Use if only some profiles support a feature. However, if within a profile the feature
+// is version or extension specific, follow this call with calls to profileRequires().
+//
+// Operation: If the current profile is not one of the profileMask,
+// give an error message.
+//
+void TParseVersions::requireProfile(const TSourceLoc& loc, int profileMask, const char* featureDesc)
+{
+ if (! (profile & profileMask))
+ error(loc, "not supported with this profile:", featureDesc, ProfileName(profile));
+}
+
+//
// When to use profileRequires():
//
// If a set of profiles have the same requirements for what version or extensions
@@ -550,12 +610,12 @@ const char* StageName(EShLanguage stage)
//
// entry point that takes multiple extensions
-void TParseVersions::profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, int numExtensions, const char* const extensions[], const char* featureDesc)
+void TParseVersions::profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, int numExtensions,
+ const char* const extensions[], const char* featureDesc)
{
if (profile & profileMask) {
- bool okay = false;
- if (minVersion > 0 && version >= minVersion)
- okay = true;
+ bool okay = minVersion > 0 && version >= minVersion;
+#ifndef GLSLANG_WEB
for (int i = 0; i < numExtensions; ++i) {
switch (getExtensionBehavior(extensions[i])) {
case EBhWarn:
@@ -568,36 +628,22 @@ void TParseVersions::profileRequires(const TSourceLoc& loc, int profileMask, int
default: break; // some compilers want this
}
}
-
+#endif
if (! okay)
error(loc, "not supported for this version or the enabled extensions", featureDesc, "");
}
}
// entry point for the above that takes a single extension
-void TParseVersions::profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, const char* extension, const char* featureDesc)
+void TParseVersions::profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, const char* extension,
+ const char* featureDesc)
{
profileRequires(loc, profileMask, minVersion, extension ? 1 : 0, &extension, featureDesc);
}
-//
-// When to use requireStage()
-//
-// If only some stages support a feature.
-//
-// Operation: If the current stage is not present, give an error message.
-//
-void TParseVersions::requireStage(const TSourceLoc& loc, EShLanguageMask languageMask, const char* featureDesc)
-{
- if (((1 << language) & languageMask) == 0)
- error(loc, "not supported in this stage:", featureDesc, StageName(language));
-}
-
-// If only one stage supports a feature, this can be called. But, all supporting stages
-// must be specified with one call.
-void TParseVersions::requireStage(const TSourceLoc& loc, EShLanguage stage, const char* featureDesc)
+void TParseVersions::unimplemented(const TSourceLoc& loc, const char* featureDesc)
{
- requireStage(loc, static_cast<EShLanguageMask>(1 << stage), featureDesc);
+ error(loc, "feature not yet implemented", featureDesc, "");
}
//
@@ -633,11 +679,6 @@ void TParseVersions::requireNotRemoved(const TSourceLoc& loc, int profileMask, i
}
}
-void TParseVersions::unimplemented(const TSourceLoc& loc, const char* featureDesc)
-{
- error(loc, "feature not yet implemented", featureDesc, "");
-}
-
// Returns true if at least one of the extensions in the extensions parameter is requested. Otherwise, returns false.
// Warns appropriately if the requested behavior of an extension is "warn".
bool TParseVersions::checkExtensionsRequested(const TSourceLoc& loc, int numExtensions, const char* const extensions[], const char* featureDesc)
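
A hypothetical call site for this helper, assuming the surrounding TParseVersions member context (the extension pair and feature name are illustrative only):

    // Proceed only if at least one of two equivalent extensions was requested;
    // an extension with "warn" behavior also emits its warning here.
    const char* const bufRefExts[] = { E_GL_EXT_buffer_reference,
                                       E_GL_EXT_buffer_reference2 };
    if (checkExtensionsRequested(loc, 2, bufRefExts, "buffer reference")) {
        // at least one extension was requested; take the feature path
    }
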
@@ -806,12 +847,22 @@ void TParseVersions::updateExtensionBehavior(int line, const char* extension, co
updateExtensionBehavior(line, "GL_KHR_shader_subgroup_basic", behaviorString);
else if (strcmp(extension, "GL_KHR_shader_subgroup_quad") == 0)
updateExtensionBehavior(line, "GL_KHR_shader_subgroup_basic", behaviorString);
-#ifdef NV_EXTENSIONS
else if (strcmp(extension, "GL_NV_shader_subgroup_partitioned") == 0)
updateExtensionBehavior(line, "GL_KHR_shader_subgroup_basic", behaviorString);
-#endif
- else if (strcmp(extension, "GL_EXT_buffer_reference2") == 0)
+ else if (strcmp(extension, "GL_EXT_buffer_reference2") == 0 ||
+ strcmp(extension, "GL_EXT_buffer_reference_uvec2") == 0)
updateExtensionBehavior(line, "GL_EXT_buffer_reference", behaviorString);
+ else if (strcmp(extension, "GL_NV_integer_cooperative_matrix") == 0)
+ updateExtensionBehavior(line, "GL_NV_cooperative_matrix", behaviorString);
+    // the subgroup extended-type extensions imply the corresponding explicit arithmetic type extensions
+ else if (strcmp(extension, "GL_EXT_shader_subgroup_extended_types_int8") == 0)
+ updateExtensionBehavior(line, "GL_EXT_shader_explicit_arithmetic_types_int8", behaviorString);
+ else if (strcmp(extension, "GL_EXT_shader_subgroup_extended_types_int16") == 0)
+ updateExtensionBehavior(line, "GL_EXT_shader_explicit_arithmetic_types_int16", behaviorString);
+ else if (strcmp(extension, "GL_EXT_shader_subgroup_extended_types_int64") == 0)
+ updateExtensionBehavior(line, "GL_EXT_shader_explicit_arithmetic_types_int64", behaviorString);
+ else if (strcmp(extension, "GL_EXT_shader_subgroup_extended_types_float16") == 0)
+ updateExtensionBehavior(line, "GL_EXT_shader_explicit_arithmetic_types_float16", behaviorString);
}
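
Each branch above forwards an alias extension to the base extension that actually gates the feature, so enabling the alias also enables the base. A standalone sketch of that forwarding pattern (the map is a simplified stand-in for glslang's behavior tracking):

    #include <map>
    #include <string>

    static std::map<std::string, std::string> behavior;

    // Applying a behavior to an alias extension also applies it to its base.
    void updateBehavior(const std::string& ext, const std::string& b) {
        behavior[ext] = b;
        if (ext == "GL_EXT_buffer_reference2" || ext == "GL_EXT_buffer_reference_uvec2")
            updateBehavior("GL_EXT_buffer_reference", b);
        else if (ext == "GL_NV_integer_cooperative_matrix")
            updateBehavior("GL_NV_cooperative_matrix", b);
    }

    int main() {
        updateBehavior("GL_EXT_buffer_reference_uvec2", "enable");
        // behavior["GL_EXT_buffer_reference"] is now "enable" as well
    }
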
void TParseVersions::updateExtensionBehavior(const char* extension, TExtensionBehavior behavior)
@@ -857,7 +908,6 @@ void TParseVersions::updateExtensionBehavior(const char* extension, TExtensionBe
// Check if extension is used with correct shader stage.
void TParseVersions::checkExtensionStage(const TSourceLoc& loc, const char * const extension)
{
-#ifdef NV_EXTENSIONS
    // GL_NV_mesh_shader extension is only allowed in task, mesh, and fragment shaders
if (strcmp(extension, "GL_NV_mesh_shader") == 0) {
requireStage(loc, (EShLanguageMask)(EShLangTaskNVMask | EShLangMeshNVMask | EShLangFragmentMask),
@@ -865,7 +915,6 @@ void TParseVersions::checkExtensionStage(const TSourceLoc& loc, const char * con
profileRequires(loc, ECoreProfile, 450, 0, "#extension GL_NV_mesh_shader");
profileRequires(loc, EEsProfile, 320, 0, "#extension GL_NV_mesh_shader");
}
-#endif
}
// Call for any operation needing full GLSL integer data-type support.
@@ -878,8 +927,8 @@ void TParseVersions::fullIntegerCheck(const TSourceLoc& loc, const char* op)
// Call for any operation needing GLSL double data-type support.
void TParseVersions::doubleCheck(const TSourceLoc& loc, const char* op)
{
- requireProfile(loc, ECoreProfile | ECompatibilityProfile, op);
- profileRequires(loc, ECoreProfile | ECompatibilityProfile, 400, nullptr, op);
+ //requireProfile(loc, ECoreProfile | ECompatibilityProfile, op);
+ profileRequires(loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader_fp64, op);
}
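
The effect of this change is to relax the old hard version floor: within the core and compatibility profiles, doubles are now accepted either on version 400+ or on any version where GL_ARB_gpu_shader_fp64 is enabled. A standalone sketch of the relaxed rule (the set lookup is a simplified stand-in for getExtensionBehavior()):

    #include <set>
    #include <string>

    bool doubleAllowed(int version, const std::set<std::string>& enabledExts) {
        bool okay = version >= 400;                            // version path
        if (enabledExts.count("GL_ARB_gpu_shader_fp64") != 0)
            okay = true;                                       // extension path
        return okay;
    }

    int main() {
        std::set<std::string> exts = { "GL_ARB_gpu_shader_fp64" };
        return doubleAllowed(150, exts) ? 0 : 1; // accepted at #version 150
    }
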
// Call for any operation needing GLSL float16 data-type support.
@@ -887,9 +936,7 @@ void TParseVersions::float16Check(const TSourceLoc& loc, const char* op, bool bu
{
if (!builtIn) {
const char* const extensions[] = {
-#if AMD_EXTENSIONS
E_GL_AMD_gpu_shader_half_float,
-#endif
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_float16};
requireExtensions(loc, sizeof(extensions)/sizeof(extensions[0]), extensions, op);
@@ -899,9 +946,7 @@ void TParseVersions::float16Check(const TSourceLoc& loc, const char* op, bool bu
bool TParseVersions::float16Arithmetic()
{
const char* const extensions[] = {
-#if AMD_EXTENSIONS
E_GL_AMD_gpu_shader_half_float,
-#endif
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_float16};
return extensionsTurnedOn(sizeof(extensions)/sizeof(extensions[0]), extensions);
@@ -910,9 +955,7 @@ bool TParseVersions::float16Arithmetic()
bool TParseVersions::int16Arithmetic()
{
const char* const extensions[] = {
-#if AMD_EXTENSIONS
E_GL_AMD_gpu_shader_int16,
-#endif
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_int16};
return extensionsTurnedOn(sizeof(extensions)/sizeof(extensions[0]), extensions);
@@ -934,9 +977,7 @@ void TParseVersions::requireFloat16Arithmetic(const TSourceLoc& loc, const char*
combined += featureDesc;
const char* const extensions[] = {
-#if AMD_EXTENSIONS
E_GL_AMD_gpu_shader_half_float,
-#endif
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_float16};
requireExtensions(loc, sizeof(extensions)/sizeof(extensions[0]), extensions, combined.c_str());
@@ -950,9 +991,7 @@ void TParseVersions::requireInt16Arithmetic(const TSourceLoc& loc, const char* o
combined += featureDesc;
const char* const extensions[] = {
-#if AMD_EXTENSIONS
E_GL_AMD_gpu_shader_int16,
-#endif
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_int16};
requireExtensions(loc, sizeof(extensions)/sizeof(extensions[0]), extensions, combined.c_str());
@@ -975,9 +1014,7 @@ void TParseVersions::float16ScalarVectorCheck(const TSourceLoc& loc, const char*
{
if (!builtIn) {
const char* const extensions[] = {
-#if AMD_EXTENSIONS
E_GL_AMD_gpu_shader_half_float,
-#endif
E_GL_EXT_shader_16bit_storage,
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_float16};
@@ -1017,7 +1054,6 @@ void TParseVersions::explicitInt8Check(const TSourceLoc& loc, const char* op, bo
}
}
-#ifdef AMD_EXTENSIONS
// Call for any operation needing GLSL float16 opaque-type support
void TParseVersions::float16OpaqueCheck(const TSourceLoc& loc, const char* op, bool builtIn)
{
@@ -1027,16 +1063,13 @@ void TParseVersions::float16OpaqueCheck(const TSourceLoc& loc, const char* op, b
profileRequires(loc, ECoreProfile | ECompatibilityProfile, 400, nullptr, op);
}
}
-#endif
// Call for any operation needing GLSL explicit int16 data-type support.
void TParseVersions::explicitInt16Check(const TSourceLoc& loc, const char* op, bool builtIn)
{
if (! builtIn) {
const char* const extensions[] = {
-#if AMD_EXTENSIONS
E_GL_AMD_gpu_shader_int16,
-#endif
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_int16};
requireExtensions(loc, sizeof(extensions)/sizeof(extensions[0]), extensions, op);
@@ -1047,9 +1080,7 @@ void TParseVersions::int16ScalarVectorCheck(const TSourceLoc& loc, const char* o
{
if (! builtIn) {
const char* const extensions[] = {
-#if AMD_EXTENSIONS
E_GL_AMD_gpu_shader_int16,
-#endif
E_GL_EXT_shader_16bit_storage,
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_int16};
@@ -1099,6 +1130,14 @@ void TParseVersions::fcoopmatCheck(const TSourceLoc& loc, const char* op, bool b
}
}
+void TParseVersions::intcoopmatCheck(const TSourceLoc& loc, const char* op, bool builtIn)
+{
+ if (!builtIn) {
+ const char* const extensions[] = {E_GL_NV_integer_cooperative_matrix};
+ requireExtensions(loc, sizeof(extensions)/sizeof(extensions[0]), extensions, op);
+ }
+}
+#endif // GLSLANG_WEB
// Call for any operation removed because SPIR-V is in use.
void TParseVersions::spvRemoved(const TSourceLoc& loc, const char* op)
{
@@ -1116,15 +1155,19 @@ void TParseVersions::vulkanRemoved(const TSourceLoc& loc, const char* op)
// Call for any operation that requires Vulkan.
void TParseVersions::requireVulkan(const TSourceLoc& loc, const char* op)
{
+#ifndef GLSLANG_WEB
if (spvVersion.vulkan == 0)
error(loc, "only allowed when using GLSL for Vulkan", op, "");
+#endif
}
// Call for any operation that requires SPIR-V.
void TParseVersions::requireSpv(const TSourceLoc& loc, const char* op)
{
+#ifndef GLSLANG_WEB
if (spvVersion.spv == 0)
error(loc, "only allowed when generating SPIR-V", op, "");
+#endif
}
} // end namespace glslang
diff --git a/thirdparty/glslang/glslang/MachineIndependent/Versions.h b/thirdparty/glslang/glslang/MachineIndependent/Versions.h
index bff082709f..58558e595a 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/Versions.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/Versions.h
@@ -124,8 +124,10 @@ const char* const E_GL_ARB_compute_shader = "GL_ARB_compute_shader
const char* const E_GL_ARB_tessellation_shader = "GL_ARB_tessellation_shader";
const char* const E_GL_ARB_enhanced_layouts = "GL_ARB_enhanced_layouts";
const char* const E_GL_ARB_texture_cube_map_array = "GL_ARB_texture_cube_map_array";
+const char* const E_GL_ARB_texture_multisample = "GL_ARB_texture_multisample";
const char* const E_GL_ARB_shader_texture_lod = "GL_ARB_shader_texture_lod";
const char* const E_GL_ARB_explicit_attrib_location = "GL_ARB_explicit_attrib_location";
+const char* const E_GL_ARB_explicit_uniform_location = "GL_ARB_explicit_uniform_location";
const char* const E_GL_ARB_shader_image_load_store = "GL_ARB_shader_image_load_store";
const char* const E_GL_ARB_shader_atomic_counters = "GL_ARB_shader_atomic_counters";
const char* const E_GL_ARB_shader_draw_parameters = "GL_ARB_shader_draw_parameters";
@@ -134,6 +136,7 @@ const char* const E_GL_ARB_derivative_control = "GL_ARB_derivative_con
const char* const E_GL_ARB_shader_texture_image_samples = "GL_ARB_shader_texture_image_samples";
const char* const E_GL_ARB_viewport_array = "GL_ARB_viewport_array";
const char* const E_GL_ARB_gpu_shader_int64 = "GL_ARB_gpu_shader_int64";
+const char* const E_GL_ARB_gpu_shader_fp64 = "GL_ARB_gpu_shader_fp64";
const char* const E_GL_ARB_shader_ballot = "GL_ARB_shader_ballot";
const char* const E_GL_ARB_sparse_texture2 = "GL_ARB_sparse_texture2";
const char* const E_GL_ARB_sparse_texture_clamp = "GL_ARB_sparse_texture_clamp";
@@ -141,6 +144,10 @@ const char* const E_GL_ARB_shader_stencil_export = "GL_ARB_shader_stencil
// const char* const E_GL_ARB_cull_distance = "GL_ARB_cull_distance"; // present for 4.5, but need extension control over block members
const char* const E_GL_ARB_post_depth_coverage = "GL_ARB_post_depth_coverage";
const char* const E_GL_ARB_shader_viewport_layer_array = "GL_ARB_shader_viewport_layer_array";
+const char* const E_GL_ARB_fragment_shader_interlock = "GL_ARB_fragment_shader_interlock";
+const char* const E_GL_ARB_shader_clock = "GL_ARB_shader_clock";
+const char* const E_GL_ARB_uniform_buffer_object = "GL_ARB_uniform_buffer_object";
+const char* const E_GL_ARB_sample_shading = "GL_ARB_sample_shading";
const char* const E_GL_KHR_shader_subgroup_basic = "GL_KHR_shader_subgroup_basic";
const char* const E_GL_KHR_shader_subgroup_vote = "GL_KHR_shader_subgroup_vote";
@@ -172,6 +179,9 @@ const char* const E_GL_EXT_scalar_block_layout = "GL_EXT_scalar_blo
const char* const E_GL_EXT_fragment_invocation_density = "GL_EXT_fragment_invocation_density";
const char* const E_GL_EXT_buffer_reference = "GL_EXT_buffer_reference";
const char* const E_GL_EXT_buffer_reference2 = "GL_EXT_buffer_reference2";
+const char* const E_GL_EXT_buffer_reference_uvec2 = "GL_EXT_buffer_reference_uvec2";
+const char* const E_GL_EXT_demote_to_helper_invocation = "GL_EXT_demote_to_helper_invocation";
+const char* const E_GL_EXT_shader_realtime_clock = "GL_EXT_shader_realtime_clock";
// Arrays of extensions for the above viewportEXTs duplications
@@ -189,7 +199,6 @@ const int Num_OVR_multiview_EXTs = sizeof(OVR_multiview_EXTs) / sizeof(OVR_multi
const char* const E_GL_GOOGLE_cpp_style_line_directive = "GL_GOOGLE_cpp_style_line_directive";
const char* const E_GL_GOOGLE_include_directive = "GL_GOOGLE_include_directive";
-#ifdef AMD_EXTENSIONS
const char* const E_GL_AMD_shader_ballot = "GL_AMD_shader_ballot";
const char* const E_GL_AMD_shader_trinary_minmax = "GL_AMD_shader_trinary_minmax";
const char* const E_GL_AMD_shader_explicit_vertex_parameter = "GL_AMD_shader_explicit_vertex_parameter";
@@ -200,9 +209,8 @@ const char* const E_GL_AMD_gpu_shader_int16 = "GL_AMD_gpu_sh
const char* const E_GL_AMD_shader_image_load_store_lod = "GL_AMD_shader_image_load_store_lod";
const char* const E_GL_AMD_shader_fragment_mask = "GL_AMD_shader_fragment_mask";
const char* const E_GL_AMD_gpu_shader_half_float_fetch = "GL_AMD_gpu_shader_half_float_fetch";
-#endif
-#ifdef NV_EXTENSIONS
+const char* const E_GL_INTEL_shader_integer_functions2 = "GL_INTEL_shader_integer_functions2";
const char* const E_GL_NV_sample_mask_override_coverage = "GL_NV_sample_mask_override_coverage";
const char* const E_SPV_NV_geometry_shader_passthrough = "GL_NV_geometry_shader_passthrough";
@@ -224,9 +232,10 @@ const char* const E_GL_NV_mesh_shader = "GL_NV_mesh_sh
const char* const viewportEXTs[] = { E_GL_ARB_shader_viewport_layer_array, E_GL_NV_viewport_array2 };
const int Num_viewportEXTs = sizeof(viewportEXTs) / sizeof(viewportEXTs[0]);
-#endif
const char* const E_GL_NV_cooperative_matrix = "GL_NV_cooperative_matrix";
+const char* const E_GL_NV_shader_sm_builtins = "GL_NV_shader_sm_builtins";
+const char* const E_GL_NV_integer_cooperative_matrix = "GL_NV_integer_cooperative_matrix";
// AEP
const char* const E_GL_ANDROID_extension_pack_es31a = "GL_ANDROID_extension_pack_es31a";
@@ -256,7 +265,7 @@ const char* const E_GL_OES_tessellation_point_size = "GL_OES_tessel
const char* const E_GL_OES_texture_buffer = "GL_OES_texture_buffer";
const char* const E_GL_OES_texture_cube_map_array = "GL_OES_texture_cube_map_array";
-// KHX
+// EXT
const char* const E_GL_EXT_shader_explicit_arithmetic_types = "GL_EXT_shader_explicit_arithmetic_types";
const char* const E_GL_EXT_shader_explicit_arithmetic_types_int8 = "GL_EXT_shader_explicit_arithmetic_types_int8";
const char* const E_GL_EXT_shader_explicit_arithmetic_types_int16 = "GL_EXT_shader_explicit_arithmetic_types_int16";
@@ -266,6 +275,11 @@ const char* const E_GL_EXT_shader_explicit_arithmetic_types_float16 = "GL_EXT_s
const char* const E_GL_EXT_shader_explicit_arithmetic_types_float32 = "GL_EXT_shader_explicit_arithmetic_types_float32";
const char* const E_GL_EXT_shader_explicit_arithmetic_types_float64 = "GL_EXT_shader_explicit_arithmetic_types_float64";
+const char* const E_GL_EXT_shader_subgroup_extended_types_int8 = "GL_EXT_shader_subgroup_extended_types_int8";
+const char* const E_GL_EXT_shader_subgroup_extended_types_int16 = "GL_EXT_shader_subgroup_extended_types_int16";
+const char* const E_GL_EXT_shader_subgroup_extended_types_int64 = "GL_EXT_shader_subgroup_extended_types_int64";
+const char* const E_GL_EXT_shader_subgroup_extended_types_float16 = "GL_EXT_shader_subgroup_extended_types_float16";
+
// Arrays of extensions for the above AEP duplications
const char* const AEP_geometry_shader[] = { E_GL_EXT_geometry_shader, E_GL_OES_geometry_shader };
diff --git a/thirdparty/glslang/glslang/MachineIndependent/attribute.cpp b/thirdparty/glslang/glslang/MachineIndependent/attribute.cpp
index d4a23f39de..9585518349 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/attribute.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/attribute.cpp
@@ -34,6 +34,8 @@
// POSSIBILITY OF SUCH DAMAGE.
//
+#ifndef GLSLANG_WEB
+
#include "attribute.h"
#include "../Include/intermediate.h"
#include "ParseHelper.h"
@@ -339,5 +341,6 @@ void TParseContext::handleLoopAttributes(const TAttributes& attributes, TIntermN
}
}
-
} // end namespace glslang
+
+#endif // GLSLANG_WEB
diff --git a/thirdparty/glslang/glslang/MachineIndependent/attribute.h b/thirdparty/glslang/glslang/MachineIndependent/attribute.h
index 844ce45806..38a943d283 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/attribute.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/attribute.h
@@ -76,7 +76,49 @@ namespace glslang {
EatMaxIterations,
EatIterationMultiple,
EatPeelCount,
- EatPartialCount
+ EatPartialCount,
+ EatFormatRgba32f,
+ EatFormatRgba16f,
+ EatFormatR32f,
+ EatFormatRgba8,
+ EatFormatRgba8Snorm,
+ EatFormatRg32f,
+ EatFormatRg16f,
+ EatFormatR11fG11fB10f,
+ EatFormatR16f,
+ EatFormatRgba16,
+ EatFormatRgb10A2,
+ EatFormatRg16,
+ EatFormatRg8,
+ EatFormatR16,
+ EatFormatR8,
+ EatFormatRgba16Snorm,
+ EatFormatRg16Snorm,
+ EatFormatRg8Snorm,
+ EatFormatR16Snorm,
+ EatFormatR8Snorm,
+ EatFormatRgba32i,
+ EatFormatRgba16i,
+ EatFormatRgba8i,
+ EatFormatR32i,
+ EatFormatRg32i,
+ EatFormatRg16i,
+ EatFormatRg8i,
+ EatFormatR16i,
+ EatFormatR8i,
+ EatFormatRgba32ui,
+ EatFormatRgba16ui,
+ EatFormatRgba8ui,
+ EatFormatR32ui,
+ EatFormatRgb10a2ui,
+ EatFormatRg32ui,
+ EatFormatRg16ui,
+ EatFormatRg8ui,
+ EatFormatR16ui,
+ EatFormatR8ui,
+ EatFormatUnknown,
+ EatNonWritable,
+ EatNonReadable
};
class TIntermAggregate;
diff --git a/thirdparty/glslang/glslang/MachineIndependent/gl_types.h b/thirdparty/glslang/glslang/MachineIndependent/gl_types.h
index c9fee9ecce..b6f613bced 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/gl_types.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/gl_types.h
@@ -78,7 +78,6 @@
#define GL_DOUBLE_MAT4x2 0x8F4D
#define GL_DOUBLE_MAT4x3 0x8F4E
-#ifdef AMD_EXTENSIONS
// Those constants are borrowed from extension NV_gpu_shader5
#define GL_FLOAT16_NV 0x8FF8
#define GL_FLOAT16_VEC2_NV 0x8FF9
@@ -94,7 +93,6 @@
#define GL_FLOAT16_MAT3x4_AMD 0x91CB
#define GL_FLOAT16_MAT4x2_AMD 0x91CC
#define GL_FLOAT16_MAT4x3_AMD 0x91CD
-#endif
#define GL_SAMPLER_1D 0x8B5D
#define GL_SAMPLER_2D 0x8B5E
@@ -117,7 +115,6 @@
#define GL_SAMPLER_CUBE_MAP_ARRAY_ARB 0x900C
#define GL_SAMPLER_CUBE_MAP_ARRAY_SHADOW_ARB 0x900D
-#ifdef AMD_EXTENSIONS
#define GL_FLOAT16_SAMPLER_1D_AMD 0x91CE
#define GL_FLOAT16_SAMPLER_2D_AMD 0x91CF
#define GL_FLOAT16_SAMPLER_3D_AMD 0x91D0
@@ -149,7 +146,6 @@
#define GL_FLOAT16_IMAGE_BUFFER_AMD 0x91E8
#define GL_FLOAT16_IMAGE_2D_MULTISAMPLE_AMD 0x91E9
#define GL_FLOAT16_IMAGE_2D_MULTISAMPLE_ARRAY_AMD 0x91EA
-#endif
#define GL_INT_SAMPLER_1D 0x8DC9
#define GL_INT_SAMPLER_2D 0x8DCA
diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang.y b/thirdparty/glslang/glslang/MachineIndependent/glslang.y
index b5691a29fd..9f30fdb2ab 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/glslang.y
+++ b/thirdparty/glslang/glslang/MachineIndependent/glslang.y
@@ -36,6 +36,31 @@
// POSSIBILITY OF SUCH DAMAGE.
//
+//
+// Do not edit the .y file; edit only the .m4 file.
+// The .y bison file is not a source file; it is derived from the .m4 file.
+// The .m4 file must be processed by m4 to generate the .y bison file.
+//
+// Code sandwiched between a pair:
+//
+// GLSLANG_WEB_EXCLUDE_ON
+// ...
+// ...
+// ...
+// GLSLANG_WEB_EXCLUDE_OFF
+//
+// Will be excluded from the grammar when m4 is executed as:
+//
+// m4 -P -DGLSLANG_WEB
+//
+// It will be included when m4 is executed as:
+//
+// m4 -P
+//
+
+
+
+
/**
* This is bison grammar and productions for parsing all versions of the
* GLSL shading languages.
@@ -125,13 +150,34 @@ extern int yylex(YYSTYPE*, TParseContext&);
%pure-parser // enable thread safety
%expect 1 // One shift reduce conflict because of if | else
-%token <lex> ATTRIBUTE VARYING
-%token <lex> FLOAT16_T FLOAT FLOAT32_T DOUBLE FLOAT64_T
-%token <lex> CONST BOOL INT UINT INT64_T UINT64_T INT32_T UINT32_T INT16_T UINT16_T INT8_T UINT8_T
-%token <lex> BREAK CONTINUE DO ELSE FOR IF DISCARD RETURN SWITCH CASE DEFAULT SUBROUTINE
+%token <lex> CONST BOOL INT UINT FLOAT
%token <lex> BVEC2 BVEC3 BVEC4
%token <lex> IVEC2 IVEC3 IVEC4
%token <lex> UVEC2 UVEC3 UVEC4
+%token <lex> VEC2 VEC3 VEC4
+%token <lex> MAT2 MAT3 MAT4
+%token <lex> MAT2X2 MAT2X3 MAT2X4
+%token <lex> MAT3X2 MAT3X3 MAT3X4
+%token <lex> MAT4X2 MAT4X3 MAT4X4
+
+// combined image/sampler
+%token <lex> SAMPLER2D SAMPLER3D SAMPLERCUBE SAMPLER2DSHADOW
+%token <lex> SAMPLERCUBESHADOW SAMPLER2DARRAY
+%token <lex> SAMPLER2DARRAYSHADOW ISAMPLER2D ISAMPLER3D ISAMPLERCUBE
+%token <lex> ISAMPLER2DARRAY USAMPLER2D USAMPLER3D
+%token <lex> USAMPLERCUBE USAMPLER2DARRAY
+
+// separate image/sampler
+%token <lex> SAMPLER SAMPLERSHADOW
+%token <lex> TEXTURE2D TEXTURE3D TEXTURECUBE TEXTURE2DARRAY
+%token <lex> ITEXTURE2D ITEXTURE3D ITEXTURECUBE ITEXTURE2DARRAY
+%token <lex> UTEXTURE2D UTEXTURE3D UTEXTURECUBE UTEXTURE2DARRAY
+
+
+
+%token <lex> ATTRIBUTE VARYING
+%token <lex> FLOAT16_T FLOAT32_T DOUBLE FLOAT64_T
+%token <lex> INT64_T UINT64_T INT32_T UINT32_T INT16_T UINT16_T INT8_T UINT8_T
%token <lex> I64VEC2 I64VEC3 I64VEC4
%token <lex> U64VEC2 U64VEC3 U64VEC4
%token <lex> I32VEC2 I32VEC3 I32VEC4
@@ -140,19 +186,10 @@ extern int yylex(YYSTYPE*, TParseContext&);
%token <lex> U16VEC2 U16VEC3 U16VEC4
%token <lex> I8VEC2 I8VEC3 I8VEC4
%token <lex> U8VEC2 U8VEC3 U8VEC4
-%token <lex> VEC2 VEC3 VEC4
-%token <lex> MAT2 MAT3 MAT4 CENTROID IN OUT INOUT
-%token <lex> UNIFORM PATCH SAMPLE BUFFER SHARED NONUNIFORM PAYLOADNV PAYLOADINNV HITATTRNV CALLDATANV CALLDATAINNV
-%token <lex> COHERENT VOLATILE RESTRICT READONLY WRITEONLY DEVICECOHERENT QUEUEFAMILYCOHERENT WORKGROUPCOHERENT SUBGROUPCOHERENT NONPRIVATE
%token <lex> DVEC2 DVEC3 DVEC4 DMAT2 DMAT3 DMAT4
%token <lex> F16VEC2 F16VEC3 F16VEC4 F16MAT2 F16MAT3 F16MAT4
%token <lex> F32VEC2 F32VEC3 F32VEC4 F32MAT2 F32MAT3 F32MAT4
%token <lex> F64VEC2 F64VEC3 F64VEC4 F64MAT2 F64MAT3 F64MAT4
-%token <lex> NOPERSPECTIVE FLAT SMOOTH LAYOUT EXPLICITINTERPAMD PERVERTEXNV PERPRIMITIVENV PERVIEWNV PERTASKNV
-
-%token <lex> MAT2X2 MAT2X3 MAT2X4
-%token <lex> MAT3X2 MAT3X3 MAT3X4
-%token <lex> MAT4X2 MAT4X3 MAT4X4
%token <lex> DMAT2X2 DMAT2X3 DMAT2X4
%token <lex> DMAT3X2 DMAT3X3 DMAT3X4
%token <lex> DMAT4X2 DMAT4X3 DMAT4X4
@@ -167,41 +204,47 @@ extern int yylex(YYSTYPE*, TParseContext&);
%token <lex> F64MAT4X2 F64MAT4X3 F64MAT4X4
%token <lex> ATOMIC_UINT
%token <lex> ACCSTRUCTNV
-%token <lex> FCOOPMATNV
+%token <lex> FCOOPMATNV ICOOPMATNV UCOOPMATNV
// combined image/sampler
-%token <lex> SAMPLER1D SAMPLER2D SAMPLER3D SAMPLERCUBE SAMPLER1DSHADOW SAMPLER2DSHADOW
-%token <lex> SAMPLERCUBESHADOW SAMPLER1DARRAY SAMPLER2DARRAY SAMPLER1DARRAYSHADOW
-%token <lex> SAMPLER2DARRAYSHADOW ISAMPLER1D ISAMPLER2D ISAMPLER3D ISAMPLERCUBE
-%token <lex> ISAMPLER1DARRAY ISAMPLER2DARRAY USAMPLER1D USAMPLER2D USAMPLER3D
-%token <lex> USAMPLERCUBE USAMPLER1DARRAY USAMPLER2DARRAY
-%token <lex> SAMPLER2DRECT SAMPLER2DRECTSHADOW ISAMPLER2DRECT USAMPLER2DRECT
-%token <lex> SAMPLERBUFFER ISAMPLERBUFFER USAMPLERBUFFER
%token <lex> SAMPLERCUBEARRAY SAMPLERCUBEARRAYSHADOW
%token <lex> ISAMPLERCUBEARRAY USAMPLERCUBEARRAY
+%token <lex> SAMPLER1D SAMPLER1DARRAY SAMPLER1DARRAYSHADOW ISAMPLER1D SAMPLER1DSHADOW
+%token <lex> SAMPLER2DRECT SAMPLER2DRECTSHADOW ISAMPLER2DRECT USAMPLER2DRECT
+%token <lex> SAMPLERBUFFER ISAMPLERBUFFER USAMPLERBUFFER
%token <lex> SAMPLER2DMS ISAMPLER2DMS USAMPLER2DMS
%token <lex> SAMPLER2DMSARRAY ISAMPLER2DMSARRAY USAMPLER2DMSARRAY
%token <lex> SAMPLEREXTERNALOES
%token <lex> SAMPLEREXTERNAL2DY2YEXT
-
+%token <lex> ISAMPLER1DARRAY USAMPLER1D USAMPLER1DARRAY
%token <lex> F16SAMPLER1D F16SAMPLER2D F16SAMPLER3D F16SAMPLER2DRECT F16SAMPLERCUBE
%token <lex> F16SAMPLER1DARRAY F16SAMPLER2DARRAY F16SAMPLERCUBEARRAY
%token <lex> F16SAMPLERBUFFER F16SAMPLER2DMS F16SAMPLER2DMSARRAY
%token <lex> F16SAMPLER1DSHADOW F16SAMPLER2DSHADOW F16SAMPLER1DARRAYSHADOW F16SAMPLER2DARRAYSHADOW
%token <lex> F16SAMPLER2DRECTSHADOW F16SAMPLERCUBESHADOW F16SAMPLERCUBEARRAYSHADOW
-// pure sampler
-%token <lex> SAMPLER SAMPLERSHADOW
+// images
+%token <lex> IMAGE1D IIMAGE1D UIMAGE1D IMAGE2D IIMAGE2D
+%token <lex> UIMAGE2D IMAGE3D IIMAGE3D UIMAGE3D
+%token <lex> IMAGE2DRECT IIMAGE2DRECT UIMAGE2DRECT
+%token <lex> IMAGECUBE IIMAGECUBE UIMAGECUBE
+%token <lex> IMAGEBUFFER IIMAGEBUFFER UIMAGEBUFFER
+%token <lex> IMAGE1DARRAY IIMAGE1DARRAY UIMAGE1DARRAY
+%token <lex> IMAGE2DARRAY IIMAGE2DARRAY UIMAGE2DARRAY
+%token <lex> IMAGECUBEARRAY IIMAGECUBEARRAY UIMAGECUBEARRAY
+%token <lex> IMAGE2DMS IIMAGE2DMS UIMAGE2DMS
+%token <lex> IMAGE2DMSARRAY IIMAGE2DMSARRAY UIMAGE2DMSARRAY
+
+%token <lex> F16IMAGE1D F16IMAGE2D F16IMAGE3D F16IMAGE2DRECT
+%token <lex> F16IMAGECUBE F16IMAGE1DARRAY F16IMAGE2DARRAY F16IMAGECUBEARRAY
+%token <lex> F16IMAGEBUFFER F16IMAGE2DMS F16IMAGE2DMSARRAY
// texture without sampler
-%token <lex> TEXTURE1D TEXTURE2D TEXTURE3D TEXTURECUBE
-%token <lex> TEXTURE1DARRAY TEXTURE2DARRAY
-%token <lex> ITEXTURE1D ITEXTURE2D ITEXTURE3D ITEXTURECUBE
-%token <lex> ITEXTURE1DARRAY ITEXTURE2DARRAY UTEXTURE1D UTEXTURE2D UTEXTURE3D
-%token <lex> UTEXTURECUBE UTEXTURE1DARRAY UTEXTURE2DARRAY
+%token <lex> TEXTURECUBEARRAY ITEXTURECUBEARRAY UTEXTURECUBEARRAY
+%token <lex> TEXTURE1D ITEXTURE1D UTEXTURE1D
+%token <lex> TEXTURE1DARRAY ITEXTURE1DARRAY UTEXTURE1DARRAY
%token <lex> TEXTURE2DRECT ITEXTURE2DRECT UTEXTURE2DRECT
%token <lex> TEXTUREBUFFER ITEXTUREBUFFER UTEXTUREBUFFER
-%token <lex> TEXTURECUBEARRAY ITEXTURECUBEARRAY UTEXTURECUBEARRAY
%token <lex> TEXTURE2DMS ITEXTURE2DMS UTEXTURE2DMS
%token <lex> TEXTURE2DMSARRAY ITEXTURE2DMSARRAY UTEXTURE2DMSARRAY
@@ -213,25 +256,8 @@ extern int yylex(YYSTYPE*, TParseContext&);
%token <lex> SUBPASSINPUT SUBPASSINPUTMS ISUBPASSINPUT ISUBPASSINPUTMS USUBPASSINPUT USUBPASSINPUTMS
%token <lex> F16SUBPASSINPUT F16SUBPASSINPUTMS
-%token <lex> IMAGE1D IIMAGE1D UIMAGE1D IMAGE2D IIMAGE2D
-%token <lex> UIMAGE2D IMAGE3D IIMAGE3D UIMAGE3D
-%token <lex> IMAGE2DRECT IIMAGE2DRECT UIMAGE2DRECT
-%token <lex> IMAGECUBE IIMAGECUBE UIMAGECUBE
-%token <lex> IMAGEBUFFER IIMAGEBUFFER UIMAGEBUFFER
-%token <lex> IMAGE1DARRAY IIMAGE1DARRAY UIMAGE1DARRAY
-%token <lex> IMAGE2DARRAY IIMAGE2DARRAY UIMAGE2DARRAY
-%token <lex> IMAGECUBEARRAY IIMAGECUBEARRAY UIMAGECUBEARRAY
-%token <lex> IMAGE2DMS IIMAGE2DMS UIMAGE2DMS
-%token <lex> IMAGE2DMSARRAY IIMAGE2DMSARRAY UIMAGE2DMSARRAY
-%token <lex> F16IMAGE1D F16IMAGE2D F16IMAGE3D F16IMAGE2DRECT
-%token <lex> F16IMAGECUBE F16IMAGE1DARRAY F16IMAGE2DARRAY F16IMAGECUBEARRAY
-%token <lex> F16IMAGEBUFFER F16IMAGE2DMS F16IMAGE2DMSARRAY
-
-%token <lex> STRUCT VOID WHILE
-%token <lex> IDENTIFIER TYPE_NAME
-%token <lex> FLOATCONSTANT DOUBLECONSTANT INT16CONSTANT UINT16CONSTANT INT32CONSTANT UINT32CONSTANT INTCONSTANT UINTCONSTANT INT64CONSTANT UINT64CONSTANT BOOLCONSTANT FLOAT16CONSTANT
%token <lex> LEFT_OP RIGHT_OP
%token <lex> INC_OP DEC_OP LE_OP GE_OP EQ_OP NE_OP
%token <lex> AND_OP OR_OP XOR_OP MUL_ASSIGN DIV_ASSIGN ADD_ASSIGN
@@ -242,11 +268,30 @@ extern int yylex(YYSTYPE*, TParseContext&);
%token <lex> COMMA COLON EQUAL SEMICOLON BANG DASH TILDE PLUS STAR SLASH PERCENT
%token <lex> LEFT_ANGLE RIGHT_ANGLE VERTICAL_BAR CARET AMPERSAND QUESTION
-%token <lex> INVARIANT PRECISE
+%token <lex> INVARIANT
%token <lex> HIGH_PRECISION MEDIUM_PRECISION LOW_PRECISION PRECISION
-
%token <lex> PACKED RESOURCE SUPERP
+%token <lex> FLOATCONSTANT INTCONSTANT UINTCONSTANT BOOLCONSTANT
+%token <lex> IDENTIFIER TYPE_NAME
+%token <lex> CENTROID IN OUT INOUT
+%token <lex> STRUCT VOID WHILE
+%token <lex> BREAK CONTINUE DO ELSE FOR IF DISCARD RETURN SWITCH CASE DEFAULT
+%token <lex> UNIFORM SHARED BUFFER
+%token <lex> FLAT SMOOTH LAYOUT
+
+
+%token <lex> DOUBLECONSTANT INT16CONSTANT UINT16CONSTANT FLOAT16CONSTANT INT32CONSTANT UINT32CONSTANT
+%token <lex> INT64CONSTANT UINT64CONSTANT
+%token <lex> SUBROUTINE DEMOTE
+%token <lex> PAYLOADNV PAYLOADINNV HITATTRNV CALLDATANV CALLDATAINNV
+%token <lex> PATCH SAMPLE NONUNIFORM
+%token <lex> COHERENT VOLATILE RESTRICT READONLY WRITEONLY DEVICECOHERENT QUEUEFAMILYCOHERENT WORKGROUPCOHERENT
+%token <lex> SUBGROUPCOHERENT NONPRIVATE
+%token <lex> NOPERSPECTIVE EXPLICITINTERPAMD PERVERTEXNV PERPRIMITIVENV PERVIEWNV PERTASKNV
+%token <lex> PRECISE
+
+
%type <interm> assignment_operator unary_operator
%type <interm.intermTypedNode> variable_identifier primary_expression postfix_expression
%type <interm.intermTypedNode> expression integer_expression assignment_expression
@@ -255,7 +300,7 @@ extern int yylex(YYSTYPE*, TParseContext&);
%type <interm.intermTypedNode> conditional_expression constant_expression
%type <interm.intermTypedNode> logical_or_expression logical_xor_expression logical_and_expression
%type <interm.intermTypedNode> shift_expression and_expression exclusive_or_expression inclusive_or_expression
-%type <interm.intermTypedNode> function_call initializer initializer_list condition conditionopt
+%type <interm.intermTypedNode> function_call initializer condition conditionopt
%type <interm.intermNode> translation_unit function_definition
%type <interm.intermNode> statement simple_statement
@@ -271,9 +316,8 @@ extern int yylex(YYSTYPE*, TParseContext&);
%type <interm> parameter_declaration parameter_declarator parameter_type_specifier
%type <interm> array_specifier
-%type <interm.type> precise_qualifier invariant_qualifier interpolation_qualifier storage_qualifier precision_qualifier
+%type <interm.type> invariant_qualifier interpolation_qualifier storage_qualifier precision_qualifier
%type <interm.type> layout_qualifier layout_qualifier_id_list layout_qualifier_id
-%type <interm.type> non_uniform_qualifier
%type <interm.typeParameters> type_parameter_specifier
%type <interm.typeParameters> type_parameter_specifier_opt
@@ -284,7 +328,7 @@ extern int yylex(YYSTYPE*, TParseContext&);
%type <interm.type> type_specifier_nonarray
%type <interm.type> struct_specifier
%type <interm.typeLine> struct_declarator
-%type <interm.typeList> struct_declarator_list struct_declaration struct_declaration_list type_name_list
+%type <interm.typeList> struct_declarator_list struct_declaration struct_declaration_list
%type <interm> block_structure
%type <interm.function> function_header function_declarator
%type <interm.function> function_header_with_parameters
@@ -293,7 +337,13 @@ extern int yylex(YYSTYPE*, TParseContext&);
%type <interm.identifierList> identifier_list
+
+%type <interm.type> precise_qualifier non_uniform_qualifier
+%type <interm.typeList> type_name_list
%type <interm.attributes> attribute attribute_list single_attribute
+%type <interm.intermNode> demote_statement
+%type <interm.intermTypedNode> initializer_list
+
%start translation_unit
%%
@@ -308,13 +358,13 @@ primary_expression
: variable_identifier {
$$ = $1;
}
- | INT32CONSTANT {
- parseContext.explicitInt32Check($1.loc, "32-bit signed literal");
- $$ = parseContext.intermediate.addConstantUnion($1.i, $1.loc, true);
+ | LEFT_PAREN expression RIGHT_PAREN {
+ $$ = $2;
+ if ($$->getAsConstantUnion())
+ $$->getAsConstantUnion()->setExpression();
}
- | UINT32CONSTANT {
- parseContext.explicitInt32Check($1.loc, "32-bit signed literal");
- $$ = parseContext.intermediate.addConstantUnion($1.u, $1.loc, true);
+ | FLOATCONSTANT {
+ $$ = parseContext.intermediate.addConstantUnion($1.d, EbtFloat, $1.loc, true);
}
| INTCONSTANT {
$$ = parseContext.intermediate.addConstantUnion($1.i, $1.loc, true);
@@ -323,6 +373,18 @@ primary_expression
parseContext.fullIntegerCheck($1.loc, "unsigned literal");
$$ = parseContext.intermediate.addConstantUnion($1.u, $1.loc, true);
}
+ | BOOLCONSTANT {
+ $$ = parseContext.intermediate.addConstantUnion($1.b, $1.loc, true);
+ }
+
+ | INT32CONSTANT {
+ parseContext.explicitInt32Check($1.loc, "32-bit signed literal");
+ $$ = parseContext.intermediate.addConstantUnion($1.i, $1.loc, true);
+ }
+ | UINT32CONSTANT {
+      parseContext.explicitInt32Check($1.loc, "32-bit unsigned literal");
+ $$ = parseContext.intermediate.addConstantUnion($1.u, $1.loc, true);
+ }
| INT64CONSTANT {
parseContext.int64Check($1.loc, "64-bit integer literal");
$$ = parseContext.intermediate.addConstantUnion($1.i64, $1.loc, true);
@@ -339,25 +401,17 @@ primary_expression
parseContext.explicitInt16Check($1.loc, "16-bit unsigned integer literal");
$$ = parseContext.intermediate.addConstantUnion((unsigned short)$1.u, $1.loc, true);
}
- | FLOATCONSTANT {
- $$ = parseContext.intermediate.addConstantUnion($1.d, EbtFloat, $1.loc, true);
- }
| DOUBLECONSTANT {
- parseContext.doubleCheck($1.loc, "double literal");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double literal");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double literal");
$$ = parseContext.intermediate.addConstantUnion($1.d, EbtDouble, $1.loc, true);
}
| FLOAT16CONSTANT {
parseContext.float16Check($1.loc, "half float literal");
$$ = parseContext.intermediate.addConstantUnion($1.d, EbtFloat16, $1.loc, true);
}
- | BOOLCONSTANT {
- $$ = parseContext.intermediate.addConstantUnion($1.b, $1.loc, true);
- }
- | LEFT_PAREN expression RIGHT_PAREN {
- $$ = $2;
- if ($$->getAsConstantUnion())
- $$->getAsConstantUnion()->setExpression();
- }
+
;
postfix_expression
@@ -483,11 +537,13 @@ function_identifier
$$.function = new TFunction(empty, TType(EbtVoid), EOpNull);
}
}
+
| non_uniform_qualifier {
// Constructor
$$.intermNode = 0;
$$.function = parseContext.handleConstructorCall($1.loc, $1);
}
+
;
unary_expression
@@ -804,7 +860,6 @@ declaration
}
| PRECISION precision_qualifier type_specifier SEMICOLON {
parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "precision statement");
-
// lazy setting of the previous scope's defaults, has effect only the first time it is called in a particular scope
parseContext.symbolTable.setPreviousDefaultPrecisions(&parseContext.defaultPrecision[0]);
parseContext.setDefaultPrecision($1.loc, $3, $2.qualifier.precision);
@@ -1048,7 +1103,9 @@ single_declaration
: fully_specified_type {
$$.type = $1;
$$.intermNode = 0;
+
parseContext.declareTypeDefaults($$.loc, $$.type);
+
}
| fully_specified_type IDENTIFIER {
$$.type = $1;
@@ -1082,7 +1139,6 @@ fully_specified_type
parseContext.profileRequires($1.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "arrayed type");
}
-
parseContext.precisionQualifierCheck($$.loc, $$.basicType, $$.qualifier);
}
| type_qualifier type_specifier {
@@ -1135,38 +1191,30 @@ interpolation_qualifier
$$.init($1.loc);
$$.qualifier.flat = true;
}
+
| NOPERSPECTIVE {
parseContext.globalCheck($1.loc, "noperspective");
-#ifdef NV_EXTENSIONS
parseContext.profileRequires($1.loc, EEsProfile, 0, E_GL_NV_shader_noperspective_interpolation, "noperspective");
-#else
- parseContext.requireProfile($1.loc, ~EEsProfile, "noperspective");
-#endif
parseContext.profileRequires($1.loc, ENoProfile, 130, 0, "noperspective");
$$.init($1.loc);
$$.qualifier.nopersp = true;
}
| EXPLICITINTERPAMD {
-#ifdef AMD_EXTENSIONS
parseContext.globalCheck($1.loc, "__explicitInterpAMD");
parseContext.profileRequires($1.loc, ECoreProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation");
parseContext.profileRequires($1.loc, ECompatibilityProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation");
$$.init($1.loc);
$$.qualifier.explicitInterp = true;
-#endif
}
| PERVERTEXNV {
-#ifdef NV_EXTENSIONS
parseContext.globalCheck($1.loc, "pervertexNV");
parseContext.profileRequires($1.loc, ECoreProfile, 0, E_GL_NV_fragment_shader_barycentric, "fragment shader barycentric");
parseContext.profileRequires($1.loc, ECompatibilityProfile, 0, E_GL_NV_fragment_shader_barycentric, "fragment shader barycentric");
parseContext.profileRequires($1.loc, EEsProfile, 0, E_GL_NV_fragment_shader_barycentric, "fragment shader barycentric");
$$.init($1.loc);
$$.qualifier.pervertexNV = true;
-#endif
}
| PERPRIMITIVENV {
-#ifdef NV_EXTENSIONS
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck($1.loc, "perprimitiveNV");
parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangFragmentMask | EShLangMeshNVMask), "perprimitiveNV");
@@ -1175,26 +1223,22 @@ interpolation_qualifier
parseContext.requireExtensions($1.loc, 1, &E_GL_NV_mesh_shader, "perprimitiveNV");
$$.init($1.loc);
$$.qualifier.perPrimitiveNV = true;
-#endif
}
| PERVIEWNV {
-#ifdef NV_EXTENSIONS
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck($1.loc, "perviewNV");
parseContext.requireStage($1.loc, EShLangMeshNV, "perviewNV");
$$.init($1.loc);
$$.qualifier.perViewNV = true;
-#endif
}
| PERTASKNV {
-#ifdef NV_EXTENSIONS
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck($1.loc, "taskNV");
parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangTaskNVMask | EShLangMeshNVMask), "taskNV");
$$.init($1.loc);
$$.qualifier.perTaskNV = true;
-#endif
}
+
;
layout_qualifier
@@ -1229,6 +1273,7 @@ layout_qualifier_id
}
;
+
precise_qualifier
: PRECISE {
parseContext.profileRequires($$.loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader5, "precise");
@@ -1238,6 +1283,7 @@ precise_qualifier
}
;
+
type_qualifier
: single_type_qualifier {
$$ = $1;
@@ -1271,6 +1317,7 @@ single_type_qualifier
// allow inheritance of storage qualifier from block declaration
$$ = $1;
}
+
| precise_qualifier {
// allow inheritance of storage qualifier from block declaration
$$ = $1;
@@ -1278,6 +1325,7 @@ single_type_qualifier
| non_uniform_qualifier {
$$ = $1;
}
+
;
storage_qualifier
@@ -1285,6 +1333,49 @@ storage_qualifier
$$.init($1.loc);
$$.qualifier.storage = EvqConst; // will later turn into EvqConstReadOnly, if the initializer is not constant
}
+ | INOUT {
+ parseContext.globalCheck($1.loc, "inout");
+ $$.init($1.loc);
+ $$.qualifier.storage = EvqInOut;
+ }
+ | IN {
+ parseContext.globalCheck($1.loc, "in");
+ $$.init($1.loc);
+ // whether this is a parameter "in" or a pipeline "in" will get sorted out a bit later
+ $$.qualifier.storage = EvqIn;
+ }
+ | OUT {
+ parseContext.globalCheck($1.loc, "out");
+ $$.init($1.loc);
+ // whether this is a parameter "out" or a pipeline "out" will get sorted out a bit later
+ $$.qualifier.storage = EvqOut;
+ }
+ | CENTROID {
+ parseContext.profileRequires($1.loc, ENoProfile, 120, 0, "centroid");
+ parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "centroid");
+ parseContext.globalCheck($1.loc, "centroid");
+ $$.init($1.loc);
+ $$.qualifier.centroid = true;
+ }
+ | UNIFORM {
+ parseContext.globalCheck($1.loc, "uniform");
+ $$.init($1.loc);
+ $$.qualifier.storage = EvqUniform;
+ }
+ | SHARED {
+ parseContext.globalCheck($1.loc, "shared");
+ parseContext.profileRequires($1.loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared");
+ parseContext.profileRequires($1.loc, EEsProfile, 310, 0, "shared");
+ parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangComputeMask | EShLangMeshNVMask | EShLangTaskNVMask), "shared");
+ $$.init($1.loc);
+ $$.qualifier.storage = EvqShared;
+ }
+ | BUFFER {
+ parseContext.globalCheck($1.loc, "buffer");
+ $$.init($1.loc);
+ $$.qualifier.storage = EvqBuffer;
+ }
+
| ATTRIBUTE {
parseContext.requireStage($1.loc, EShLangVertex, "attribute");
parseContext.checkDeprecated($1.loc, ECoreProfile, 130, "attribute");
@@ -1311,30 +1402,6 @@ storage_qualifier
else
$$.qualifier.storage = EvqVaryingIn;
}
- | INOUT {
- parseContext.globalCheck($1.loc, "inout");
- $$.init($1.loc);
- $$.qualifier.storage = EvqInOut;
- }
- | IN {
- parseContext.globalCheck($1.loc, "in");
- $$.init($1.loc);
- // whether this is a parameter "in" or a pipeline "in" will get sorted out a bit later
- $$.qualifier.storage = EvqIn;
- }
- | OUT {
- parseContext.globalCheck($1.loc, "out");
- $$.init($1.loc);
- // whether this is a parameter "out" or a pipeline "out" will get sorted out a bit later
- $$.qualifier.storage = EvqOut;
- }
- | CENTROID {
- parseContext.profileRequires($1.loc, ENoProfile, 120, 0, "centroid");
- parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "centroid");
- parseContext.globalCheck($1.loc, "centroid");
- $$.init($1.loc);
- $$.qualifier.centroid = true;
- }
| PATCH {
parseContext.globalCheck($1.loc, "patch");
parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangTessControlMask | EShLangTessEvaluationMask), "patch");
@@ -1346,76 +1413,44 @@ storage_qualifier
$$.init($1.loc);
$$.qualifier.sample = true;
}
- | UNIFORM {
- parseContext.globalCheck($1.loc, "uniform");
- $$.init($1.loc);
- $$.qualifier.storage = EvqUniform;
- }
- | BUFFER {
- parseContext.globalCheck($1.loc, "buffer");
- $$.init($1.loc);
- $$.qualifier.storage = EvqBuffer;
- }
| HITATTRNV {
-#ifdef NV_EXTENSIONS
parseContext.globalCheck($1.loc, "hitAttributeNV");
parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangIntersectNVMask | EShLangClosestHitNVMask
| EShLangAnyHitNVMask), "hitAttributeNV");
parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "hitAttributeNV");
$$.init($1.loc);
$$.qualifier.storage = EvqHitAttrNV;
-#endif
}
| PAYLOADNV {
-#ifdef NV_EXTENSIONS
parseContext.globalCheck($1.loc, "rayPayloadNV");
parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangRayGenNVMask | EShLangClosestHitNVMask |
EShLangAnyHitNVMask | EShLangMissNVMask), "rayPayloadNV");
parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadNV");
$$.init($1.loc);
$$.qualifier.storage = EvqPayloadNV;
-#endif
}
| PAYLOADINNV {
-#ifdef NV_EXTENSIONS
parseContext.globalCheck($1.loc, "rayPayloadInNV");
parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangClosestHitNVMask |
EShLangAnyHitNVMask | EShLangMissNVMask), "rayPayloadInNV");
parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadInNV");
$$.init($1.loc);
$$.qualifier.storage = EvqPayloadInNV;
-#endif
}
| CALLDATANV {
-#ifdef NV_EXTENSIONS
parseContext.globalCheck($1.loc, "callableDataNV");
parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangRayGenNVMask |
EShLangClosestHitNVMask | EShLangMissNVMask | EShLangCallableNVMask), "callableDataNV");
parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataNV");
$$.init($1.loc);
$$.qualifier.storage = EvqCallableDataNV;
-#endif
}
| CALLDATAINNV {
-#ifdef NV_EXTENSIONS
parseContext.globalCheck($1.loc, "callableDataInNV");
parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangCallableNVMask), "callableDataInNV");
parseContext.profileRequires($1.loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataInNV");
$$.init($1.loc);
$$.qualifier.storage = EvqCallableDataInNV;
-#endif
- }
- | SHARED {
- parseContext.globalCheck($1.loc, "shared");
- parseContext.profileRequires($1.loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared");
- parseContext.profileRequires($1.loc, EEsProfile, 310, 0, "shared");
-#ifdef NV_EXTENSIONS
- parseContext.requireStage($1.loc, (EShLanguageMask)(EShLangComputeMask | EShLangMeshNVMask | EShLangTaskNVMask), "shared");
-#else
- parseContext.requireStage($1.loc, EShLangCompute, "shared");
-#endif
- $$.init($1.loc);
- $$.qualifier.storage = EvqShared;
}
| COHERENT {
$$.init($1.loc);
@@ -1474,8 +1509,10 @@ storage_qualifier
parseContext.unimplemented($1.loc, "subroutine");
$$.init($1.loc);
}
+
;
+
non_uniform_qualifier
: NONUNIFORM {
$$.init($1.loc);
@@ -1494,6 +1531,7 @@ type_name_list
}
;
+
type_specifier
: type_specifier_nonarray type_parameter_specifier_opt {
$$ = $1;
@@ -1577,8 +1615,147 @@ type_specifier_nonarray
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtFloat;
}
+ | INT {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtInt;
+ }
+ | UINT {
+ parseContext.fullIntegerCheck($1.loc, "unsigned integer");
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtUint;
+ }
+ | BOOL {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtBool;
+ }
+ | VEC2 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setVector(2);
+ }
+ | VEC3 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setVector(3);
+ }
+ | VEC4 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setVector(4);
+ }
+ | BVEC2 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtBool;
+ $$.setVector(2);
+ }
+ | BVEC3 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtBool;
+ $$.setVector(3);
+ }
+ | BVEC4 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtBool;
+ $$.setVector(4);
+ }
+ | IVEC2 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtInt;
+ $$.setVector(2);
+ }
+ | IVEC3 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtInt;
+ $$.setVector(3);
+ }
+ | IVEC4 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtInt;
+ $$.setVector(4);
+ }
+ | UVEC2 {
+ parseContext.fullIntegerCheck($1.loc, "unsigned integer vector");
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtUint;
+ $$.setVector(2);
+ }
+ | UVEC3 {
+ parseContext.fullIntegerCheck($1.loc, "unsigned integer vector");
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtUint;
+ $$.setVector(3);
+ }
+ | UVEC4 {
+ parseContext.fullIntegerCheck($1.loc, "unsigned integer vector");
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtUint;
+ $$.setVector(4);
+ }
+ | MAT2 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(2, 2);
+ }
+ | MAT3 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(3, 3);
+ }
+ | MAT4 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(4, 4);
+ }
+ | MAT2X2 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(2, 2);
+ }
+ | MAT2X3 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(2, 3);
+ }
+ | MAT2X4 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(2, 4);
+ }
+ | MAT3X2 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(3, 2);
+ }
+ | MAT3X3 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(3, 3);
+ }
+ | MAT3X4 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(3, 4);
+ }
+ | MAT4X2 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(4, 2);
+ }
+ | MAT4X3 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(4, 3);
+ }
+ | MAT4X4 {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtFloat;
+ $$.setMatrix(4, 4);
+ }
+
| DOUBLE {
- parseContext.doubleCheck($1.loc, "double");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
}
@@ -1597,15 +1774,6 @@ type_specifier_nonarray
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
}
- | INT {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtInt;
- }
- | UINT {
- parseContext.fullIntegerCheck($1.loc, "unsigned integer");
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtUint;
- }
| INT8_T {
parseContext.int8ScalarVectorCheck($1.loc, "8-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -1646,39 +1814,26 @@ type_specifier_nonarray
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtUint64;
}
- | BOOL {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtBool;
- }
- | VEC2 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setVector(2);
- }
- | VEC3 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setVector(3);
- }
- | VEC4 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setVector(4);
- }
| DVEC2 {
- parseContext.doubleCheck($1.loc, "double vector");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double vector");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double vector");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setVector(2);
}
| DVEC3 {
- parseContext.doubleCheck($1.loc, "double vector");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double vector");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double vector");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setVector(3);
}
| DVEC4 {
- parseContext.doubleCheck($1.loc, "double vector");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double vector");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double vector");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setVector(4);
@@ -1737,36 +1892,6 @@ type_specifier_nonarray
$$.basicType = EbtDouble;
$$.setVector(4);
}
- | BVEC2 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtBool;
- $$.setVector(2);
- }
- | BVEC3 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtBool;
- $$.setVector(3);
- }
- | BVEC4 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtBool;
- $$.setVector(4);
- }
- | IVEC2 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtInt;
- $$.setVector(2);
- }
- | IVEC3 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtInt;
- $$.setVector(3);
- }
- | IVEC4 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtInt;
- $$.setVector(4);
- }
| I8VEC2 {
parseContext.int8ScalarVectorCheck($1.loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -1839,24 +1964,6 @@ type_specifier_nonarray
$$.basicType = EbtInt64;
$$.setVector(4);
}
- | UVEC2 {
- parseContext.fullIntegerCheck($1.loc, "unsigned integer vector");
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtUint;
- $$.setVector(2);
- }
- | UVEC3 {
- parseContext.fullIntegerCheck($1.loc, "unsigned integer vector");
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtUint;
- $$.setVector(3);
- }
- | UVEC4 {
- parseContext.fullIntegerCheck($1.loc, "unsigned integer vector");
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtUint;
- $$.setVector(4);
- }
| U8VEC2 {
parseContext.int8ScalarVectorCheck($1.loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -1929,134 +2036,98 @@ type_specifier_nonarray
$$.basicType = EbtUint64;
$$.setVector(4);
}
- | MAT2 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(2, 2);
- }
- | MAT3 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(3, 3);
- }
- | MAT4 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(4, 4);
- }
- | MAT2X2 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(2, 2);
- }
- | MAT2X3 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(2, 3);
- }
- | MAT2X4 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(2, 4);
- }
- | MAT3X2 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(3, 2);
- }
- | MAT3X3 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(3, 3);
- }
- | MAT3X4 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(3, 4);
- }
- | MAT4X2 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(4, 2);
- }
- | MAT4X3 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(4, 3);
- }
- | MAT4X4 {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtFloat;
- $$.setMatrix(4, 4);
- }
| DMAT2 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(2, 2);
}
| DMAT3 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(3, 3);
}
| DMAT4 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(4, 4);
}
| DMAT2X2 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(2, 2);
}
| DMAT2X3 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(2, 3);
}
| DMAT2X4 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(2, 4);
}
| DMAT3X2 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(3, 2);
}
| DMAT3X3 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(3, 3);
}
| DMAT3X4 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(3, 4);
}
| DMAT4X2 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(4, 2);
}
| DMAT4X3 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(4, 3);
}
| DMAT4X4 {
- parseContext.doubleCheck($1.loc, "double matrix");
+ parseContext.requireProfile($1.loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck($1.loc, "double matrix");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtDouble;
$$.setMatrix(4, 4);
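
Review note: every DMAT rule above now runs the same two-step gate — requireProfile() rejects double matrices outside the desktop core/compatibility profiles, and doubleCheck() is skipped while the built-in symbol table is being seeded. A minimal standalone sketch of that pattern; the enum values and diagnostics are illustrative, not glslang's:

    #include <cstdio>

    enum Profile { ENoProfile = 1, ECoreProfile = 2, ECompatibilityProfile = 4, EEsProfile = 8 };

    // Returns false and diagnoses if the current profile is not in the allowed mask.
    bool requireProfile(int current, int allowed, const char* feature) {
        if ((current & allowed) == 0) {
            std::printf("error: '%s' not supported in this profile\n", feature);
            return false;
        }
        return true;
    }

    void doubleMatrixCheck(int profile, bool atBuiltInLevel) {
        if (!requireProfile(profile, ECoreProfile | ECompatibilityProfile, "double matrix"))
            return;
        // Built-in symbol loading is exempt from the feature/version check.
        if (!atBuiltInLevel)
            std::printf("doubleCheck: verify fp64 support (e.g. #version 400)\n");
    }

    int main() {
        doubleMatrixCheck(EEsProfile, false);   // rejected: ES profile
        doubleMatrixCheck(ECoreProfile, true);  // accepted, doubleCheck skipped
    }
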
@@ -2278,10 +2349,8 @@ type_specifier_nonarray
$$.setMatrix(4, 4);
}
| ACCSTRUCTNV {
-#ifdef NV_EXTENSIONS
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtAccStructNV;
-#endif
}
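
Review note: the dropped #ifdef NV_EXTENSIONS guard here is part of a broader change in this update — vendor paths are now always compiled and accepted or rejected per shader at parse time, instead of per build. A hedged sketch of that runtime gate (the extension string is the real one; the registry and helper are stand-ins for the parse context's bookkeeping):

    #include <cstdio>
    #include <set>
    #include <string>

    // Stand-in for TParseContext's per-shader extension state.
    static std::set<std::string> enabledExtensions = { "GL_NV_ray_tracing" };

    bool extensionTurnedOn(const std::string& ext) {
        return enabledExtensions.count(ext) != 0;
    }

    int main() {
        // Old scheme: this path only existed when built with -DNV_EXTENSIONS.
        // New scheme: always compiled, gated when the shader is parsed.
        if (extensionTurnedOn("GL_NV_ray_tracing"))
            std::printf("accelerationStructureNV accepted\n");
        else
            std::printf("error: extension not enabled\n");
    }
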
| ATOMIC_UINT {
parseContext.vulkanRemoved($1.loc, "atomic counter types");
@@ -2293,6 +2362,7 @@ type_specifier_nonarray
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat, Esd1D);
}
+#endif
| SAMPLER2D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
@@ -2308,11 +2378,6 @@ type_specifier_nonarray
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat, EsdCube);
}
- | SAMPLER1DSHADOW {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.set(EbtFloat, Esd1D, false, true);
- }
| SAMPLER2DSHADOW {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
@@ -2323,25 +2388,31 @@ type_specifier_nonarray
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat, EsdCube, false, true);
}
- | SAMPLER1DARRAY {
+ | SAMPLER2DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtFloat, Esd1D, true);
+ $$.sampler.set(EbtFloat, Esd2D, true);
}
- | SAMPLER2DARRAY {
+ | SAMPLER2DARRAYSHADOW {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtFloat, Esd2D, true);
+ $$.sampler.set(EbtFloat, Esd2D, true, true);
}
- | SAMPLER1DARRAYSHADOW {
+#ifndef GLSLANG_WEB
+ | SAMPLER1DSHADOW {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtFloat, Esd1D, true, true);
+ $$.sampler.set(EbtFloat, Esd1D, false, true);
}
- | SAMPLER2DARRAYSHADOW {
+ | SAMPLER1DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtFloat, Esd2D, true, true);
+ $$.sampler.set(EbtFloat, Esd1D, true);
+ }
+ | SAMPLER1DARRAYSHADOW {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.set(EbtFloat, Esd1D, true, true);
}
| SAMPLERCUBEARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2354,114 +2425,89 @@ type_specifier_nonarray
$$.sampler.set(EbtFloat, EsdCube, true, true);
}
| F16SAMPLER1D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd1D);
-#endif
}
| F16SAMPLER2D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd2D);
-#endif
}
| F16SAMPLER3D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd3D);
-#endif
}
| F16SAMPLERCUBE {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, EsdCube);
-#endif
}
| F16SAMPLER1DSHADOW {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd1D, false, true);
-#endif
}
| F16SAMPLER2DSHADOW {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd2D, false, true);
-#endif
}
| F16SAMPLERCUBESHADOW {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, EsdCube, false, true);
-#endif
}
| F16SAMPLER1DARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd1D, true);
-#endif
}
| F16SAMPLER2DARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd2D, true);
-#endif
}
| F16SAMPLER1DARRAYSHADOW {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd1D, true, true);
-#endif
}
| F16SAMPLER2DARRAYSHADOW {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd2D, true, true);
-#endif
}
| F16SAMPLERCUBEARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, EsdCube, true);
-#endif
}
| F16SAMPLERCUBEARRAYSHADOW {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, EsdCube, true, true);
-#endif
}
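
Review note: with the AMD_EXTENSIONS guards gone, each f16 opaque-type rule above calls the same runtime check instead. A standalone sketch of what float16OpaqueCheck() enforces (signature simplified; the real method lives on the parse context):

    #include <cstdio>

    void float16OpaqueCheck(const char* feature, bool builtIn, bool extEnabled) {
        // Built-in symbol-table loading may declare these freely; user code
        // needs GL_AMD_gpu_shader_half_float_fetch enabled.
        if (!builtIn && !extEnabled)
            std::printf("error: '%s' requires GL_AMD_gpu_shader_half_float_fetch\n", feature);
    }

    int main() {
        float16OpaqueCheck("half float sampler", /*builtIn=*/true,  /*extEnabled=*/false); // silent
        float16OpaqueCheck("half float sampler", /*builtIn=*/false, /*extEnabled=*/false); // error
    }
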
| ISAMPLER1D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtInt, Esd1D);
}
+#endif
| ISAMPLER2D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
@@ -2477,15 +2523,31 @@ type_specifier_nonarray
$$.basicType = EbtSampler;
$$.sampler.set(EbtInt, EsdCube);
}
- | ISAMPLER1DARRAY {
+ | ISAMPLER2DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtInt, Esd1D, true);
+ $$.sampler.set(EbtInt, Esd2D, true);
}
- | ISAMPLER2DARRAY {
+ | USAMPLER2D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtInt, Esd2D, true);
+ $$.sampler.set(EbtUint, Esd2D);
+ }
+ | USAMPLER3D {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.set(EbtUint, Esd3D);
+ }
+ | USAMPLERCUBE {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.set(EbtUint, EsdCube);
+ }
+#ifndef GLSLANG_WEB
+ | ISAMPLER1DARRAY {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.set(EbtInt, Esd1D, true);
}
| ISAMPLERCUBEARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2497,36 +2559,108 @@ type_specifier_nonarray
$$.basicType = EbtSampler;
$$.sampler.set(EbtUint, Esd1D);
}
- | USAMPLER2D {
+ | USAMPLER1DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtUint, Esd2D);
+ $$.sampler.set(EbtUint, Esd1D, true);
}
- | USAMPLER3D {
+ | USAMPLERCUBEARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtUint, Esd3D);
+ $$.sampler.set(EbtUint, EsdCube, true);
}
- | USAMPLERCUBE {
+ | TEXTURECUBEARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtUint, EsdCube);
+ $$.sampler.setTexture(EbtFloat, EsdCube, true);
}
- | USAMPLER1DARRAY {
+ | ITEXTURECUBEARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtUint, Esd1D, true);
+ $$.sampler.setTexture(EbtInt, EsdCube, true);
}
+ | UTEXTURECUBEARRAY {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtUint, EsdCube, true);
+ }
+#endif
| USAMPLER2DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtUint, Esd2D, true);
}
- | USAMPLERCUBEARRAY {
+ | TEXTURE2D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
- $$.sampler.set(EbtUint, EsdCube, true);
+ $$.sampler.setTexture(EbtFloat, Esd2D);
+ }
+ | TEXTURE3D {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtFloat, Esd3D);
+ }
+ | TEXTURE2DARRAY {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtFloat, Esd2D, true);
}
+ | TEXTURECUBE {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtFloat, EsdCube);
+ }
+ | ITEXTURE2D {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtInt, Esd2D);
+ }
+ | ITEXTURE3D {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtInt, Esd3D);
+ }
+ | ITEXTURECUBE {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtInt, EsdCube);
+ }
+ | ITEXTURE2DARRAY {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtInt, Esd2D, true);
+ }
+ | UTEXTURE2D {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtUint, Esd2D);
+ }
+ | UTEXTURE3D {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtUint, Esd3D);
+ }
+ | UTEXTURECUBE {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtUint, EsdCube);
+ }
+ | UTEXTURE2DARRAY {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setTexture(EbtUint, Esd2D, true);
+ }
+ | SAMPLER {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setPureSampler(false);
+ }
+ | SAMPLERSHADOW {
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtSampler;
+ $$.sampler.setPureSampler(true);
+ }
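
Review note: three distinct configurations are being set up in these actions — set() builds a combined texture+sampler (sampler2D), setTexture() a sampler-less texture (texture2D), and setPureSampler() a texture-less sampler for Vulkan-style separate sampling. A toy model of those setters; field names mirror the calls above, but this is not glslang's actual TSampler:

    enum BasicType { EbtFloat, EbtInt, EbtUint, EbtFloat16 };
    enum Dim { Esd1D, Esd2D, Esd3D, EsdCube, EsdRect, EsdBuffer };

    struct Sampler {
        BasicType type{};
        Dim dim{};
        bool arrayed = false, shadow = false, ms = false;
        bool combined = false;   // texture + sampler in one object (sampler2D)
        bool pure = false;       // sampler with no texture (Vulkan 'sampler')

        void set(BasicType t, Dim d, bool a = false, bool s = false, bool m = false) {
            type = t; dim = d; arrayed = a; shadow = s; ms = m; combined = true;
        }
        void setTexture(BasicType t, Dim d, bool a = false, bool s = false, bool m = false) {
            type = t; dim = d; arrayed = a; shadow = s; ms = m; combined = false;
        }
        void setPureSampler(bool s) { pure = true; shadow = s; }
    };

    int main() {
        Sampler s;
        s.set(EbtFloat, Esd2D, true, true);   // sampler2DArrayShadow
        s.setTexture(EbtUint, EsdCube);       // utextureCube
        s.setPureSampler(true);               // samplerShadow
    }
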
+#ifndef GLSLANG_WEB
| SAMPLER2DRECT {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
@@ -2538,20 +2672,16 @@ type_specifier_nonarray
$$.sampler.set(EbtFloat, EsdRect, false, true);
}
| F16SAMPLER2DRECT {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, EsdRect);
-#endif
}
| F16SAMPLER2DRECTSHADOW {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, EsdRect, false, true);
-#endif
}
| ISAMPLER2DRECT {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2569,12 +2699,10 @@ type_specifier_nonarray
$$.sampler.set(EbtFloat, EsdBuffer);
}
| F16SAMPLERBUFFER {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, EsdBuffer);
-#endif
}
| ISAMPLERBUFFER {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2592,12 +2720,10 @@ type_specifier_nonarray
$$.sampler.set(EbtFloat, Esd2D, false, false, true);
}
| F16SAMPLER2DMS {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd2D, false, false, true);
-#endif
}
| ISAMPLER2DMS {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2615,12 +2741,10 @@ type_specifier_nonarray
$$.sampler.set(EbtFloat, Esd2D, true, false, true);
}
| F16SAMPLER2DMSARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.set(EbtFloat16, Esd2D, true, false, true);
-#endif
}
| ISAMPLER2DMSARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2632,67 +2756,34 @@ type_specifier_nonarray
$$.basicType = EbtSampler;
$$.sampler.set(EbtUint, Esd2D, true, false, true);
}
- | SAMPLER {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setPureSampler(false);
- }
- | SAMPLERSHADOW {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setPureSampler(true);
- }
| TEXTURE1D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat, Esd1D);
}
| F16TEXTURE1D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, Esd1D);
-#endif
- }
- | TEXTURE2D {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtFloat, Esd2D);
}
| F16TEXTURE2D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, Esd2D);
-#endif
- }
- | TEXTURE3D {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtFloat, Esd3D);
}
| F16TEXTURE3D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, Esd3D);
-#endif
- }
- | TEXTURECUBE {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtFloat, EsdCube);
}
| F16TEXTURECUBE {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, EsdCube);
-#endif
}
| TEXTURE1DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2700,121 +2791,53 @@ type_specifier_nonarray
$$.sampler.setTexture(EbtFloat, Esd1D, true);
}
| F16TEXTURE1DARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, Esd1D, true);
-#endif
- }
- | TEXTURE2DARRAY {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtFloat, Esd2D, true);
}
| F16TEXTURE2DARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, Esd2D, true);
-#endif
- }
- | TEXTURECUBEARRAY {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtFloat, EsdCube, true);
}
| F16TEXTURECUBEARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, EsdCube, true);
-#endif
}
| ITEXTURE1D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtInt, Esd1D);
}
- | ITEXTURE2D {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtInt, Esd2D);
- }
- | ITEXTURE3D {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtInt, Esd3D);
- }
- | ITEXTURECUBE {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtInt, EsdCube);
- }
| ITEXTURE1DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtInt, Esd1D, true);
}
- | ITEXTURE2DARRAY {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtInt, Esd2D, true);
- }
- | ITEXTURECUBEARRAY {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtInt, EsdCube, true);
- }
| UTEXTURE1D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtUint, Esd1D);
}
- | UTEXTURE2D {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtUint, Esd2D);
- }
- | UTEXTURE3D {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtUint, Esd3D);
- }
- | UTEXTURECUBE {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtUint, EsdCube);
- }
| UTEXTURE1DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtUint, Esd1D, true);
}
- | UTEXTURE2DARRAY {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtUint, Esd2D, true);
- }
- | UTEXTURECUBEARRAY {
- $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
- $$.basicType = EbtSampler;
- $$.sampler.setTexture(EbtUint, EsdCube, true);
- }
| TEXTURE2DRECT {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat, EsdRect);
}
| F16TEXTURE2DRECT {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, EsdRect);
-#endif
}
| ITEXTURE2DRECT {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2832,12 +2855,10 @@ type_specifier_nonarray
$$.sampler.setTexture(EbtFloat, EsdBuffer);
}
| F16TEXTUREBUFFER {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, EsdBuffer);
-#endif
}
| ITEXTUREBUFFER {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2855,12 +2876,10 @@ type_specifier_nonarray
$$.sampler.setTexture(EbtFloat, Esd2D, false, false, true);
}
| F16TEXTURE2DMS {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, Esd2D, false, false, true);
-#endif
}
| ITEXTURE2DMS {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2878,12 +2897,10 @@ type_specifier_nonarray
$$.sampler.setTexture(EbtFloat, Esd2D, true, false, true);
}
| F16TEXTURE2DMSARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setTexture(EbtFloat16, Esd2D, true, false, true);
-#endif
}
| ITEXTURE2DMSARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2901,12 +2918,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, Esd1D);
}
| F16IMAGE1D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, Esd1D);
-#endif
}
| IIMAGE1D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2924,12 +2939,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, Esd2D);
}
| F16IMAGE2D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, Esd2D);
-#endif
}
| IIMAGE2D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2947,12 +2960,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, Esd3D);
}
| F16IMAGE3D {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, Esd3D);
-#endif
}
| IIMAGE3D {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2970,12 +2981,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, EsdRect);
}
| F16IMAGE2DRECT {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, EsdRect);
-#endif
}
| IIMAGE2DRECT {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -2993,12 +3002,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, EsdCube);
}
| F16IMAGECUBE {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, EsdCube);
-#endif
}
| IIMAGECUBE {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -3016,12 +3023,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, EsdBuffer);
}
| F16IMAGEBUFFER {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, EsdBuffer);
-#endif
}
| IIMAGEBUFFER {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -3039,12 +3044,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, Esd1D, true);
}
| F16IMAGE1DARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, Esd1D, true);
-#endif
}
| IIMAGE1DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -3062,12 +3065,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, Esd2D, true);
}
| F16IMAGE2DARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, Esd2D, true);
-#endif
}
| IIMAGE2DARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -3085,12 +3086,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, EsdCube, true);
}
| F16IMAGECUBEARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, EsdCube, true);
-#endif
}
| IIMAGECUBEARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -3108,12 +3107,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, Esd2D, false, false, true);
}
| F16IMAGE2DMS {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, Esd2D, false, false, true);
-#endif
}
| IIMAGE2DMS {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -3131,12 +3128,10 @@ type_specifier_nonarray
$$.sampler.setImage(EbtFloat, Esd2D, true, false, true);
}
| F16IMAGE2DMSARRAY {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setImage(EbtFloat16, Esd2D, true, false, true);
-#endif
}
| IIMAGE2DMSARRAY {
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
@@ -3173,22 +3168,18 @@ type_specifier_nonarray
$$.sampler.setSubpass(EbtFloat, true);
}
| F16SUBPASSINPUT {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float subpass input", parseContext.symbolTable.atBuiltInLevel());
parseContext.requireStage($1.loc, EShLangFragment, "subpass input");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setSubpass(EbtFloat16);
-#endif
}
| F16SUBPASSINPUTMS {
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck($1.loc, "half float subpass input", parseContext.symbolTable.atBuiltInLevel());
parseContext.requireStage($1.loc, EShLangFragment, "subpass input");
$$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
$$.basicType = EbtSampler;
$$.sampler.setSubpass(EbtFloat16, true);
-#endif
}
| ISUBPASSINPUT {
parseContext.requireStage($1.loc, EShLangFragment, "subpass input");
@@ -3220,6 +3211,19 @@ type_specifier_nonarray
$$.basicType = EbtFloat;
$$.coopmat = true;
}
+ | ICOOPMATNV {
+ parseContext.intcoopmatCheck($1.loc, "icoopmatNV", parseContext.symbolTable.atBuiltInLevel());
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtInt;
+ $$.coopmat = true;
+ }
+ | UCOOPMATNV {
+ parseContext.intcoopmatCheck($1.loc, "ucoopmatNV", parseContext.symbolTable.atBuiltInLevel());
+ $$.init($1.loc, parseContext.symbolTable.atGlobalLevel());
+ $$.basicType = EbtUint;
+ $$.coopmat = true;
+ }
+#endif
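
Review note: the two new rules extend NV cooperative matrices from float to int/uint element types; intcoopmatCheck() requires GL_NV_integer_cooperative_matrix on top of the base cooperative-matrix extension, except while built-ins are loaded. Illustrative sketch — the helper body is a guess at the shape, not the real implementation:

    #include <cstdio>
    #include <set>
    #include <string>

    static std::set<std::string> exts;

    void intcoopmatCheck(const char* feature, bool builtIn) {
        if (builtIn) return; // built-in symbol loading is exempt
        if (!exts.count("GL_NV_integer_cooperative_matrix"))
            std::printf("error: '%s' requires GL_NV_integer_cooperative_matrix\n", feature);
    }

    int main() {
        intcoopmatCheck("icoopmatNV", false); // rejected until the extension is enabled
        exts.insert("GL_NV_integer_cooperative_matrix");
        intcoopmatCheck("ucoopmatNV", false); // accepted
    }
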
| struct_specifier {
$$ = $1;
$$.qualifier.storage = parseContext.symbolTable.atGlobalLevel() ? EvqGlobal : EvqTemporary;
@@ -3300,7 +3304,7 @@ struct_declaration
if ($1.arraySizes) {
parseContext.profileRequires($1.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
parseContext.profileRequires($1.loc, EEsProfile, 300, 0, "arrayed type");
- if (parseContext.profile == EEsProfile)
+ if (parseContext.isEsProfile())
parseContext.arraySizeRequiredCheck($1.loc, *$1.arraySizes);
}
@@ -3322,7 +3326,7 @@ struct_declaration
if ($2.arraySizes) {
parseContext.profileRequires($2.loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
parseContext.profileRequires($2.loc, EEsProfile, 300, 0, "arrayed type");
- if (parseContext.profile == EEsProfile)
+ if (parseContext.isEsProfile())
parseContext.arraySizeRequiredCheck($2.loc, *$2.arraySizes);
}
@@ -3374,6 +3378,7 @@ initializer
: assignment_expression {
$$ = $1;
}
+#ifndef GLSLANG_WEB
| LEFT_BRACE initializer_list RIGHT_BRACE {
const char* initFeature = "{ } style initializers";
parseContext.requireProfile($1.loc, ~EEsProfile, initFeature);
@@ -3386,8 +3391,10 @@ initializer
parseContext.profileRequires($1.loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature);
$$ = $2;
}
+#endif
;
+#ifndef GLSLANG_WEB
initializer_list
: initializer {
$$ = parseContext.intermediate.growAggregate(0, $1, $1->getLoc());
@@ -3397,6 +3404,7 @@ initializer_list
}
;
+#endif
declaration_statement
: declaration { $$ = $1; }
;
@@ -3416,8 +3424,21 @@ simple_statement
| case_label { $$ = $1; }
| iteration_statement { $$ = $1; }
| jump_statement { $$ = $1; }
+#ifndef GLSLANG_WEB
+ | demote_statement { $$ = $1; }
+#endif
;
+#ifndef GLSLANG_WEB
+demote_statement
+ : DEMOTE SEMICOLON {
+ parseContext.requireStage($1.loc, EShLangFragment, "demote");
+ parseContext.requireExtensions($1.loc, 1, &E_GL_EXT_demote_to_helper_invocation, "demote");
+ $$ = parseContext.intermediate.addBranch(EOpDemote, $1.loc);
+ }
+ ;
+#endif
+
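Review note: this block adds the whole 'demote;' statement — fragment-stage only, behind GL_EXT_demote_to_helper_invocation, and lowered to an EOpDemote branch node; unlike discard, it is specified to keep the invocation running as a helper. Standalone sketch of the two gates (names mirror the calls above, logic simplified):

    #include <cstdio>
    #include <string>

    enum Stage { Vertex, Fragment, Compute };

    bool demoteAllowed(Stage stage, bool extEnabled, std::string* why) {
        if (stage != Fragment) { *why = "demote: fragment stage only"; return false; }
        if (!extEnabled) { *why = "demote: needs GL_EXT_demote_to_helper_invocation"; return false; }
        return true; // the parser then emits an EOpDemote branch node
    }

    int main() {
        std::string why;
        if (!demoteAllowed(Compute, true, &why)) std::printf("%s\n", why.c_str());
        if (demoteAllowed(Fragment, true, &why)) std::printf("demote accepted\n");
    }
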
compound_statement
: LEFT_BRACE RIGHT_BRACE { $$ = 0; }
| LEFT_BRACE {
@@ -3500,11 +3521,13 @@ selection_statement
: selection_statement_nonattributed {
$$ = $1;
}
+#ifndef GLSLANG_WEB
| attribute selection_statement_nonattributed {
parseContext.handleSelectionAttributes(*$1, $2);
$$ = $2;
}
+#endif
selection_statement_nonattributed
: IF LEFT_PAREN expression RIGHT_PAREN selection_rest_statement {
parseContext.boolCheck($1.loc, $3);
@@ -3545,11 +3568,13 @@ switch_statement
: switch_statement_nonattributed {
$$ = $1;
}
+#ifndef GLSLANG_WEB
| attribute switch_statement_nonattributed {
parseContext.handleSwitchAttributes(*$1, $2);
$$ = $2;
}
+#endif
switch_statement_nonattributed
: SWITCH LEFT_PAREN expression RIGHT_PAREN {
// start new switch sequence on the switch stack
@@ -3607,11 +3632,13 @@ iteration_statement
: iteration_statement_nonattributed {
$$ = $1;
}
+#ifndef GLSLANG_WEB
| attribute iteration_statement_nonattributed {
parseContext.handleLoopAttributes(*$1, $2);
$$ = $2;
}
+#endif
iteration_statement_nonattributed
: WHILE LEFT_PAREN {
if (! parseContext.limits.whileLoops)
@@ -3742,11 +3769,13 @@ external_declaration
| declaration {
$$ = $1;
}
+#ifndef GLSLANG_WEB
| SEMICOLON {
parseContext.requireProfile($1.loc, ~EEsProfile, "extraneous semicolon");
parseContext.profileRequires($1.loc, ~EEsProfile, 460, nullptr, "extraneous semicolon");
$$ = nullptr;
}
+#endif
;
function_definition
@@ -3771,6 +3800,7 @@ function_definition
}
;
+#ifndef GLSLANG_WEB
attribute
: LEFT_BRACKET LEFT_BRACKET attribute_list RIGHT_BRACKET RIGHT_BRACKET {
$$ = $3;
@@ -3793,4 +3823,5 @@ single_attribute
$$ = parseContext.makeAttributes(*$1.string, $3);
}
+#endif
%%
diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp
index 07feffea60..2a47faada4 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp
@@ -62,7 +62,7 @@
/* Copy the first part of user declarations. */
-#line 43 "MachineIndependent/glslang.y" /* yacc.c:339 */
+#line 68 "MachineIndependent/glslang.y" /* yacc.c:339 */
/* Based on:
@@ -123,271 +123,271 @@ extern int yydebug;
# define YYTOKENTYPE
enum yytokentype
{
- ATTRIBUTE = 258,
- VARYING = 259,
- FLOAT16_T = 260,
- FLOAT = 261,
- FLOAT32_T = 262,
- DOUBLE = 263,
- FLOAT64_T = 264,
- CONST = 265,
- BOOL = 266,
- INT = 267,
- UINT = 268,
- INT64_T = 269,
- UINT64_T = 270,
- INT32_T = 271,
- UINT32_T = 272,
- INT16_T = 273,
- UINT16_T = 274,
- INT8_T = 275,
- UINT8_T = 276,
- BREAK = 277,
- CONTINUE = 278,
- DO = 279,
- ELSE = 280,
- FOR = 281,
- IF = 282,
- DISCARD = 283,
- RETURN = 284,
- SWITCH = 285,
- CASE = 286,
- DEFAULT = 287,
- SUBROUTINE = 288,
- BVEC2 = 289,
- BVEC3 = 290,
- BVEC4 = 291,
- IVEC2 = 292,
- IVEC3 = 293,
- IVEC4 = 294,
- UVEC2 = 295,
- UVEC3 = 296,
- UVEC4 = 297,
- I64VEC2 = 298,
- I64VEC3 = 299,
- I64VEC4 = 300,
- U64VEC2 = 301,
- U64VEC3 = 302,
- U64VEC4 = 303,
- I32VEC2 = 304,
- I32VEC3 = 305,
- I32VEC4 = 306,
- U32VEC2 = 307,
- U32VEC3 = 308,
- U32VEC4 = 309,
- I16VEC2 = 310,
- I16VEC3 = 311,
- I16VEC4 = 312,
- U16VEC2 = 313,
- U16VEC3 = 314,
- U16VEC4 = 315,
- I8VEC2 = 316,
- I8VEC3 = 317,
- I8VEC4 = 318,
- U8VEC2 = 319,
- U8VEC3 = 320,
- U8VEC4 = 321,
- VEC2 = 322,
- VEC3 = 323,
- VEC4 = 324,
- MAT2 = 325,
- MAT3 = 326,
- MAT4 = 327,
- CENTROID = 328,
- IN = 329,
- OUT = 330,
- INOUT = 331,
- UNIFORM = 332,
- PATCH = 333,
- SAMPLE = 334,
- BUFFER = 335,
- SHARED = 336,
- NONUNIFORM = 337,
- PAYLOADNV = 338,
- PAYLOADINNV = 339,
- HITATTRNV = 340,
- CALLDATANV = 341,
- CALLDATAINNV = 342,
- COHERENT = 343,
- VOLATILE = 344,
- RESTRICT = 345,
- READONLY = 346,
- WRITEONLY = 347,
- DEVICECOHERENT = 348,
- QUEUEFAMILYCOHERENT = 349,
- WORKGROUPCOHERENT = 350,
- SUBGROUPCOHERENT = 351,
- NONPRIVATE = 352,
- DVEC2 = 353,
- DVEC3 = 354,
- DVEC4 = 355,
- DMAT2 = 356,
- DMAT3 = 357,
- DMAT4 = 358,
- F16VEC2 = 359,
- F16VEC3 = 360,
- F16VEC4 = 361,
- F16MAT2 = 362,
- F16MAT3 = 363,
- F16MAT4 = 364,
- F32VEC2 = 365,
- F32VEC3 = 366,
- F32VEC4 = 367,
- F32MAT2 = 368,
- F32MAT3 = 369,
- F32MAT4 = 370,
- F64VEC2 = 371,
- F64VEC3 = 372,
- F64VEC4 = 373,
- F64MAT2 = 374,
- F64MAT3 = 375,
- F64MAT4 = 376,
- NOPERSPECTIVE = 377,
- FLAT = 378,
- SMOOTH = 379,
- LAYOUT = 380,
- EXPLICITINTERPAMD = 381,
- PERVERTEXNV = 382,
- PERPRIMITIVENV = 383,
- PERVIEWNV = 384,
- PERTASKNV = 385,
- MAT2X2 = 386,
- MAT2X3 = 387,
- MAT2X4 = 388,
- MAT3X2 = 389,
- MAT3X3 = 390,
- MAT3X4 = 391,
- MAT4X2 = 392,
- MAT4X3 = 393,
- MAT4X4 = 394,
- DMAT2X2 = 395,
- DMAT2X3 = 396,
- DMAT2X4 = 397,
- DMAT3X2 = 398,
- DMAT3X3 = 399,
- DMAT3X4 = 400,
- DMAT4X2 = 401,
- DMAT4X3 = 402,
- DMAT4X4 = 403,
- F16MAT2X2 = 404,
- F16MAT2X3 = 405,
- F16MAT2X4 = 406,
- F16MAT3X2 = 407,
- F16MAT3X3 = 408,
- F16MAT3X4 = 409,
- F16MAT4X2 = 410,
- F16MAT4X3 = 411,
- F16MAT4X4 = 412,
- F32MAT2X2 = 413,
- F32MAT2X3 = 414,
- F32MAT2X4 = 415,
- F32MAT3X2 = 416,
- F32MAT3X3 = 417,
- F32MAT3X4 = 418,
- F32MAT4X2 = 419,
- F32MAT4X3 = 420,
- F32MAT4X4 = 421,
- F64MAT2X2 = 422,
- F64MAT2X3 = 423,
- F64MAT2X4 = 424,
- F64MAT3X2 = 425,
- F64MAT3X3 = 426,
- F64MAT3X4 = 427,
- F64MAT4X2 = 428,
- F64MAT4X3 = 429,
- F64MAT4X4 = 430,
- ATOMIC_UINT = 431,
- ACCSTRUCTNV = 432,
- FCOOPMATNV = 433,
- SAMPLER1D = 434,
- SAMPLER2D = 435,
- SAMPLER3D = 436,
- SAMPLERCUBE = 437,
- SAMPLER1DSHADOW = 438,
- SAMPLER2DSHADOW = 439,
- SAMPLERCUBESHADOW = 440,
- SAMPLER1DARRAY = 441,
- SAMPLER2DARRAY = 442,
- SAMPLER1DARRAYSHADOW = 443,
- SAMPLER2DARRAYSHADOW = 444,
- ISAMPLER1D = 445,
- ISAMPLER2D = 446,
- ISAMPLER3D = 447,
- ISAMPLERCUBE = 448,
- ISAMPLER1DARRAY = 449,
- ISAMPLER2DARRAY = 450,
- USAMPLER1D = 451,
- USAMPLER2D = 452,
- USAMPLER3D = 453,
- USAMPLERCUBE = 454,
- USAMPLER1DARRAY = 455,
- USAMPLER2DARRAY = 456,
- SAMPLER2DRECT = 457,
- SAMPLER2DRECTSHADOW = 458,
- ISAMPLER2DRECT = 459,
- USAMPLER2DRECT = 460,
- SAMPLERBUFFER = 461,
- ISAMPLERBUFFER = 462,
- USAMPLERBUFFER = 463,
- SAMPLERCUBEARRAY = 464,
- SAMPLERCUBEARRAYSHADOW = 465,
- ISAMPLERCUBEARRAY = 466,
- USAMPLERCUBEARRAY = 467,
- SAMPLER2DMS = 468,
- ISAMPLER2DMS = 469,
- USAMPLER2DMS = 470,
- SAMPLER2DMSARRAY = 471,
- ISAMPLER2DMSARRAY = 472,
- USAMPLER2DMSARRAY = 473,
- SAMPLEREXTERNALOES = 474,
- SAMPLEREXTERNAL2DY2YEXT = 475,
- F16SAMPLER1D = 476,
- F16SAMPLER2D = 477,
- F16SAMPLER3D = 478,
- F16SAMPLER2DRECT = 479,
- F16SAMPLERCUBE = 480,
- F16SAMPLER1DARRAY = 481,
- F16SAMPLER2DARRAY = 482,
- F16SAMPLERCUBEARRAY = 483,
- F16SAMPLERBUFFER = 484,
- F16SAMPLER2DMS = 485,
- F16SAMPLER2DMSARRAY = 486,
- F16SAMPLER1DSHADOW = 487,
- F16SAMPLER2DSHADOW = 488,
- F16SAMPLER1DARRAYSHADOW = 489,
- F16SAMPLER2DARRAYSHADOW = 490,
- F16SAMPLER2DRECTSHADOW = 491,
- F16SAMPLERCUBESHADOW = 492,
- F16SAMPLERCUBEARRAYSHADOW = 493,
- SAMPLER = 494,
- SAMPLERSHADOW = 495,
- TEXTURE1D = 496,
- TEXTURE2D = 497,
- TEXTURE3D = 498,
- TEXTURECUBE = 499,
- TEXTURE1DARRAY = 500,
- TEXTURE2DARRAY = 501,
- ITEXTURE1D = 502,
- ITEXTURE2D = 503,
- ITEXTURE3D = 504,
- ITEXTURECUBE = 505,
- ITEXTURE1DARRAY = 506,
- ITEXTURE2DARRAY = 507,
- UTEXTURE1D = 508,
- UTEXTURE2D = 509,
- UTEXTURE3D = 510,
- UTEXTURECUBE = 511,
- UTEXTURE1DARRAY = 512,
- UTEXTURE2DARRAY = 513,
- TEXTURE2DRECT = 514,
- ITEXTURE2DRECT = 515,
- UTEXTURE2DRECT = 516,
- TEXTUREBUFFER = 517,
- ITEXTUREBUFFER = 518,
- UTEXTUREBUFFER = 519,
- TEXTURECUBEARRAY = 520,
- ITEXTURECUBEARRAY = 521,
- UTEXTURECUBEARRAY = 522,
+ CONST = 258,
+ BOOL = 259,
+ INT = 260,
+ UINT = 261,
+ FLOAT = 262,
+ BVEC2 = 263,
+ BVEC3 = 264,
+ BVEC4 = 265,
+ IVEC2 = 266,
+ IVEC3 = 267,
+ IVEC4 = 268,
+ UVEC2 = 269,
+ UVEC3 = 270,
+ UVEC4 = 271,
+ VEC2 = 272,
+ VEC3 = 273,
+ VEC4 = 274,
+ MAT2 = 275,
+ MAT3 = 276,
+ MAT4 = 277,
+ MAT2X2 = 278,
+ MAT2X3 = 279,
+ MAT2X4 = 280,
+ MAT3X2 = 281,
+ MAT3X3 = 282,
+ MAT3X4 = 283,
+ MAT4X2 = 284,
+ MAT4X3 = 285,
+ MAT4X4 = 286,
+ SAMPLER2D = 287,
+ SAMPLER3D = 288,
+ SAMPLERCUBE = 289,
+ SAMPLER2DSHADOW = 290,
+ SAMPLERCUBESHADOW = 291,
+ SAMPLER2DARRAY = 292,
+ SAMPLER2DARRAYSHADOW = 293,
+ ISAMPLER2D = 294,
+ ISAMPLER3D = 295,
+ ISAMPLERCUBE = 296,
+ ISAMPLER2DARRAY = 297,
+ USAMPLER2D = 298,
+ USAMPLER3D = 299,
+ USAMPLERCUBE = 300,
+ USAMPLER2DARRAY = 301,
+ SAMPLER = 302,
+ SAMPLERSHADOW = 303,
+ TEXTURE2D = 304,
+ TEXTURE3D = 305,
+ TEXTURECUBE = 306,
+ TEXTURE2DARRAY = 307,
+ ITEXTURE2D = 308,
+ ITEXTURE3D = 309,
+ ITEXTURECUBE = 310,
+ ITEXTURE2DARRAY = 311,
+ UTEXTURE2D = 312,
+ UTEXTURE3D = 313,
+ UTEXTURECUBE = 314,
+ UTEXTURE2DARRAY = 315,
+ ATTRIBUTE = 316,
+ VARYING = 317,
+ FLOAT16_T = 318,
+ FLOAT32_T = 319,
+ DOUBLE = 320,
+ FLOAT64_T = 321,
+ INT64_T = 322,
+ UINT64_T = 323,
+ INT32_T = 324,
+ UINT32_T = 325,
+ INT16_T = 326,
+ UINT16_T = 327,
+ INT8_T = 328,
+ UINT8_T = 329,
+ I64VEC2 = 330,
+ I64VEC3 = 331,
+ I64VEC4 = 332,
+ U64VEC2 = 333,
+ U64VEC3 = 334,
+ U64VEC4 = 335,
+ I32VEC2 = 336,
+ I32VEC3 = 337,
+ I32VEC4 = 338,
+ U32VEC2 = 339,
+ U32VEC3 = 340,
+ U32VEC4 = 341,
+ I16VEC2 = 342,
+ I16VEC3 = 343,
+ I16VEC4 = 344,
+ U16VEC2 = 345,
+ U16VEC3 = 346,
+ U16VEC4 = 347,
+ I8VEC2 = 348,
+ I8VEC3 = 349,
+ I8VEC4 = 350,
+ U8VEC2 = 351,
+ U8VEC3 = 352,
+ U8VEC4 = 353,
+ DVEC2 = 354,
+ DVEC3 = 355,
+ DVEC4 = 356,
+ DMAT2 = 357,
+ DMAT3 = 358,
+ DMAT4 = 359,
+ F16VEC2 = 360,
+ F16VEC3 = 361,
+ F16VEC4 = 362,
+ F16MAT2 = 363,
+ F16MAT3 = 364,
+ F16MAT4 = 365,
+ F32VEC2 = 366,
+ F32VEC3 = 367,
+ F32VEC4 = 368,
+ F32MAT2 = 369,
+ F32MAT3 = 370,
+ F32MAT4 = 371,
+ F64VEC2 = 372,
+ F64VEC3 = 373,
+ F64VEC4 = 374,
+ F64MAT2 = 375,
+ F64MAT3 = 376,
+ F64MAT4 = 377,
+ DMAT2X2 = 378,
+ DMAT2X3 = 379,
+ DMAT2X4 = 380,
+ DMAT3X2 = 381,
+ DMAT3X3 = 382,
+ DMAT3X4 = 383,
+ DMAT4X2 = 384,
+ DMAT4X3 = 385,
+ DMAT4X4 = 386,
+ F16MAT2X2 = 387,
+ F16MAT2X3 = 388,
+ F16MAT2X4 = 389,
+ F16MAT3X2 = 390,
+ F16MAT3X3 = 391,
+ F16MAT3X4 = 392,
+ F16MAT4X2 = 393,
+ F16MAT4X3 = 394,
+ F16MAT4X4 = 395,
+ F32MAT2X2 = 396,
+ F32MAT2X3 = 397,
+ F32MAT2X4 = 398,
+ F32MAT3X2 = 399,
+ F32MAT3X3 = 400,
+ F32MAT3X4 = 401,
+ F32MAT4X2 = 402,
+ F32MAT4X3 = 403,
+ F32MAT4X4 = 404,
+ F64MAT2X2 = 405,
+ F64MAT2X3 = 406,
+ F64MAT2X4 = 407,
+ F64MAT3X2 = 408,
+ F64MAT3X3 = 409,
+ F64MAT3X4 = 410,
+ F64MAT4X2 = 411,
+ F64MAT4X3 = 412,
+ F64MAT4X4 = 413,
+ ATOMIC_UINT = 414,
+ ACCSTRUCTNV = 415,
+ FCOOPMATNV = 416,
+ ICOOPMATNV = 417,
+ UCOOPMATNV = 418,
+ SAMPLERCUBEARRAY = 419,
+ SAMPLERCUBEARRAYSHADOW = 420,
+ ISAMPLERCUBEARRAY = 421,
+ USAMPLERCUBEARRAY = 422,
+ SAMPLER1D = 423,
+ SAMPLER1DARRAY = 424,
+ SAMPLER1DARRAYSHADOW = 425,
+ ISAMPLER1D = 426,
+ SAMPLER1DSHADOW = 427,
+ SAMPLER2DRECT = 428,
+ SAMPLER2DRECTSHADOW = 429,
+ ISAMPLER2DRECT = 430,
+ USAMPLER2DRECT = 431,
+ SAMPLERBUFFER = 432,
+ ISAMPLERBUFFER = 433,
+ USAMPLERBUFFER = 434,
+ SAMPLER2DMS = 435,
+ ISAMPLER2DMS = 436,
+ USAMPLER2DMS = 437,
+ SAMPLER2DMSARRAY = 438,
+ ISAMPLER2DMSARRAY = 439,
+ USAMPLER2DMSARRAY = 440,
+ SAMPLEREXTERNALOES = 441,
+ SAMPLEREXTERNAL2DY2YEXT = 442,
+ ISAMPLER1DARRAY = 443,
+ USAMPLER1D = 444,
+ USAMPLER1DARRAY = 445,
+ F16SAMPLER1D = 446,
+ F16SAMPLER2D = 447,
+ F16SAMPLER3D = 448,
+ F16SAMPLER2DRECT = 449,
+ F16SAMPLERCUBE = 450,
+ F16SAMPLER1DARRAY = 451,
+ F16SAMPLER2DARRAY = 452,
+ F16SAMPLERCUBEARRAY = 453,
+ F16SAMPLERBUFFER = 454,
+ F16SAMPLER2DMS = 455,
+ F16SAMPLER2DMSARRAY = 456,
+ F16SAMPLER1DSHADOW = 457,
+ F16SAMPLER2DSHADOW = 458,
+ F16SAMPLER1DARRAYSHADOW = 459,
+ F16SAMPLER2DARRAYSHADOW = 460,
+ F16SAMPLER2DRECTSHADOW = 461,
+ F16SAMPLERCUBESHADOW = 462,
+ F16SAMPLERCUBEARRAYSHADOW = 463,
+ IMAGE1D = 464,
+ IIMAGE1D = 465,
+ UIMAGE1D = 466,
+ IMAGE2D = 467,
+ IIMAGE2D = 468,
+ UIMAGE2D = 469,
+ IMAGE3D = 470,
+ IIMAGE3D = 471,
+ UIMAGE3D = 472,
+ IMAGE2DRECT = 473,
+ IIMAGE2DRECT = 474,
+ UIMAGE2DRECT = 475,
+ IMAGECUBE = 476,
+ IIMAGECUBE = 477,
+ UIMAGECUBE = 478,
+ IMAGEBUFFER = 479,
+ IIMAGEBUFFER = 480,
+ UIMAGEBUFFER = 481,
+ IMAGE1DARRAY = 482,
+ IIMAGE1DARRAY = 483,
+ UIMAGE1DARRAY = 484,
+ IMAGE2DARRAY = 485,
+ IIMAGE2DARRAY = 486,
+ UIMAGE2DARRAY = 487,
+ IMAGECUBEARRAY = 488,
+ IIMAGECUBEARRAY = 489,
+ UIMAGECUBEARRAY = 490,
+ IMAGE2DMS = 491,
+ IIMAGE2DMS = 492,
+ UIMAGE2DMS = 493,
+ IMAGE2DMSARRAY = 494,
+ IIMAGE2DMSARRAY = 495,
+ UIMAGE2DMSARRAY = 496,
+ F16IMAGE1D = 497,
+ F16IMAGE2D = 498,
+ F16IMAGE3D = 499,
+ F16IMAGE2DRECT = 500,
+ F16IMAGECUBE = 501,
+ F16IMAGE1DARRAY = 502,
+ F16IMAGE2DARRAY = 503,
+ F16IMAGECUBEARRAY = 504,
+ F16IMAGEBUFFER = 505,
+ F16IMAGE2DMS = 506,
+ F16IMAGE2DMSARRAY = 507,
+ TEXTURECUBEARRAY = 508,
+ ITEXTURECUBEARRAY = 509,
+ UTEXTURECUBEARRAY = 510,
+ TEXTURE1D = 511,
+ ITEXTURE1D = 512,
+ UTEXTURE1D = 513,
+ TEXTURE1DARRAY = 514,
+ ITEXTURE1DARRAY = 515,
+ UTEXTURE1DARRAY = 516,
+ TEXTURE2DRECT = 517,
+ ITEXTURE2DRECT = 518,
+ UTEXTURE2DRECT = 519,
+ TEXTUREBUFFER = 520,
+ ITEXTUREBUFFER = 521,
+ UTEXTUREBUFFER = 522,
TEXTURE2DMS = 523,
ITEXTURE2DMS = 524,
UTEXTURE2DMS = 525,
@@ -413,121 +413,124 @@ extern int yydebug;
USUBPASSINPUTMS = 545,
F16SUBPASSINPUT = 546,
F16SUBPASSINPUTMS = 547,
- IMAGE1D = 548,
- IIMAGE1D = 549,
- UIMAGE1D = 550,
- IMAGE2D = 551,
- IIMAGE2D = 552,
- UIMAGE2D = 553,
- IMAGE3D = 554,
- IIMAGE3D = 555,
- UIMAGE3D = 556,
- IMAGE2DRECT = 557,
- IIMAGE2DRECT = 558,
- UIMAGE2DRECT = 559,
- IMAGECUBE = 560,
- IIMAGECUBE = 561,
- UIMAGECUBE = 562,
- IMAGEBUFFER = 563,
- IIMAGEBUFFER = 564,
- UIMAGEBUFFER = 565,
- IMAGE1DARRAY = 566,
- IIMAGE1DARRAY = 567,
- UIMAGE1DARRAY = 568,
- IMAGE2DARRAY = 569,
- IIMAGE2DARRAY = 570,
- UIMAGE2DARRAY = 571,
- IMAGECUBEARRAY = 572,
- IIMAGECUBEARRAY = 573,
- UIMAGECUBEARRAY = 574,
- IMAGE2DMS = 575,
- IIMAGE2DMS = 576,
- UIMAGE2DMS = 577,
- IMAGE2DMSARRAY = 578,
- IIMAGE2DMSARRAY = 579,
- UIMAGE2DMSARRAY = 580,
- F16IMAGE1D = 581,
- F16IMAGE2D = 582,
- F16IMAGE3D = 583,
- F16IMAGE2DRECT = 584,
- F16IMAGECUBE = 585,
- F16IMAGE1DARRAY = 586,
- F16IMAGE2DARRAY = 587,
- F16IMAGECUBEARRAY = 588,
- F16IMAGEBUFFER = 589,
- F16IMAGE2DMS = 590,
- F16IMAGE2DMSARRAY = 591,
- STRUCT = 592,
- VOID = 593,
- WHILE = 594,
- IDENTIFIER = 595,
- TYPE_NAME = 596,
- FLOATCONSTANT = 597,
- DOUBLECONSTANT = 598,
- INT16CONSTANT = 599,
- UINT16CONSTANT = 600,
- INT32CONSTANT = 601,
- UINT32CONSTANT = 602,
- INTCONSTANT = 603,
- UINTCONSTANT = 604,
- INT64CONSTANT = 605,
- UINT64CONSTANT = 606,
- BOOLCONSTANT = 607,
- FLOAT16CONSTANT = 608,
- LEFT_OP = 609,
- RIGHT_OP = 610,
- INC_OP = 611,
- DEC_OP = 612,
- LE_OP = 613,
- GE_OP = 614,
- EQ_OP = 615,
- NE_OP = 616,
- AND_OP = 617,
- OR_OP = 618,
- XOR_OP = 619,
- MUL_ASSIGN = 620,
- DIV_ASSIGN = 621,
- ADD_ASSIGN = 622,
- MOD_ASSIGN = 623,
- LEFT_ASSIGN = 624,
- RIGHT_ASSIGN = 625,
- AND_ASSIGN = 626,
- XOR_ASSIGN = 627,
- OR_ASSIGN = 628,
- SUB_ASSIGN = 629,
- LEFT_PAREN = 630,
- RIGHT_PAREN = 631,
- LEFT_BRACKET = 632,
- RIGHT_BRACKET = 633,
- LEFT_BRACE = 634,
- RIGHT_BRACE = 635,
- DOT = 636,
- COMMA = 637,
- COLON = 638,
- EQUAL = 639,
- SEMICOLON = 640,
- BANG = 641,
- DASH = 642,
- TILDE = 643,
- PLUS = 644,
- STAR = 645,
- SLASH = 646,
- PERCENT = 647,
- LEFT_ANGLE = 648,
- RIGHT_ANGLE = 649,
- VERTICAL_BAR = 650,
- CARET = 651,
- AMPERSAND = 652,
- QUESTION = 653,
- INVARIANT = 654,
- PRECISE = 655,
- HIGH_PRECISION = 656,
- MEDIUM_PRECISION = 657,
- LOW_PRECISION = 658,
- PRECISION = 659,
- PACKED = 660,
- RESOURCE = 661,
- SUPERP = 662
+ LEFT_OP = 548,
+ RIGHT_OP = 549,
+ INC_OP = 550,
+ DEC_OP = 551,
+ LE_OP = 552,
+ GE_OP = 553,
+ EQ_OP = 554,
+ NE_OP = 555,
+ AND_OP = 556,
+ OR_OP = 557,
+ XOR_OP = 558,
+ MUL_ASSIGN = 559,
+ DIV_ASSIGN = 560,
+ ADD_ASSIGN = 561,
+ MOD_ASSIGN = 562,
+ LEFT_ASSIGN = 563,
+ RIGHT_ASSIGN = 564,
+ AND_ASSIGN = 565,
+ XOR_ASSIGN = 566,
+ OR_ASSIGN = 567,
+ SUB_ASSIGN = 568,
+ LEFT_PAREN = 569,
+ RIGHT_PAREN = 570,
+ LEFT_BRACKET = 571,
+ RIGHT_BRACKET = 572,
+ LEFT_BRACE = 573,
+ RIGHT_BRACE = 574,
+ DOT = 575,
+ COMMA = 576,
+ COLON = 577,
+ EQUAL = 578,
+ SEMICOLON = 579,
+ BANG = 580,
+ DASH = 581,
+ TILDE = 582,
+ PLUS = 583,
+ STAR = 584,
+ SLASH = 585,
+ PERCENT = 586,
+ LEFT_ANGLE = 587,
+ RIGHT_ANGLE = 588,
+ VERTICAL_BAR = 589,
+ CARET = 590,
+ AMPERSAND = 591,
+ QUESTION = 592,
+ INVARIANT = 593,
+ HIGH_PRECISION = 594,
+ MEDIUM_PRECISION = 595,
+ LOW_PRECISION = 596,
+ PRECISION = 597,
+ PACKED = 598,
+ RESOURCE = 599,
+ SUPERP = 600,
+ FLOATCONSTANT = 601,
+ INTCONSTANT = 602,
+ UINTCONSTANT = 603,
+ BOOLCONSTANT = 604,
+ IDENTIFIER = 605,
+ TYPE_NAME = 606,
+ CENTROID = 607,
+ IN = 608,
+ OUT = 609,
+ INOUT = 610,
+ STRUCT = 611,
+ VOID = 612,
+ WHILE = 613,
+ BREAK = 614,
+ CONTINUE = 615,
+ DO = 616,
+ ELSE = 617,
+ FOR = 618,
+ IF = 619,
+ DISCARD = 620,
+ RETURN = 621,
+ SWITCH = 622,
+ CASE = 623,
+ DEFAULT = 624,
+ UNIFORM = 625,
+ SHARED = 626,
+ BUFFER = 627,
+ FLAT = 628,
+ SMOOTH = 629,
+ LAYOUT = 630,
+ DOUBLECONSTANT = 631,
+ INT16CONSTANT = 632,
+ UINT16CONSTANT = 633,
+ FLOAT16CONSTANT = 634,
+ INT32CONSTANT = 635,
+ UINT32CONSTANT = 636,
+ INT64CONSTANT = 637,
+ UINT64CONSTANT = 638,
+ SUBROUTINE = 639,
+ DEMOTE = 640,
+ PAYLOADNV = 641,
+ PAYLOADINNV = 642,
+ HITATTRNV = 643,
+ CALLDATANV = 644,
+ CALLDATAINNV = 645,
+ PATCH = 646,
+ SAMPLE = 647,
+ NONUNIFORM = 648,
+ COHERENT = 649,
+ VOLATILE = 650,
+ RESTRICT = 651,
+ READONLY = 652,
+ WRITEONLY = 653,
+ DEVICECOHERENT = 654,
+ QUEUEFAMILYCOHERENT = 655,
+ WORKGROUPCOHERENT = 656,
+ SUBGROUPCOHERENT = 657,
+ NONPRIVATE = 658,
+ NOPERSPECTIVE = 659,
+ EXPLICITINTERPAMD = 660,
+ PERVERTEXNV = 661,
+ PERPRIMITIVENV = 662,
+ PERVIEWNV = 663,
+ PERTASKNV = 664,
+ PRECISE = 665
};
#endif
@@ -536,7 +539,7 @@ extern int yydebug;
union YYSTYPE
{
-#line 71 "MachineIndependent/glslang.y" /* yacc.c:355 */
+#line 96 "MachineIndependent/glslang.y" /* yacc.c:355 */
struct {
glslang::TSourceLoc loc;
@@ -572,7 +575,7 @@ union YYSTYPE
glslang::TArraySizes* typeParameters;
} interm;
-#line 576 "MachineIndependent/glslang_tab.cpp" /* yacc.c:355 */
+#line 579 "MachineIndependent/glslang_tab.cpp" /* yacc.c:355 */
};
typedef union YYSTYPE YYSTYPE;
@@ -587,7 +590,7 @@ int yyparse (glslang::TParseContext* pParseContext);
#endif /* !YY_YY_MACHINEINDEPENDENT_GLSLANG_TAB_CPP_H_INCLUDED */
/* Copy the second part of user declarations. */
-#line 107 "MachineIndependent/glslang.y" /* yacc.c:358 */
+#line 132 "MachineIndependent/glslang.y" /* yacc.c:358 */
/* windows only pragma */
@@ -603,7 +606,7 @@ int yyparse (glslang::TParseContext* pParseContext);
extern int yylex(YYSTYPE*, TParseContext&);
-#line 607 "MachineIndependent/glslang_tab.cpp" /* yacc.c:358 */
+#line 610 "MachineIndependent/glslang_tab.cpp" /* yacc.c:358 */
#ifdef short
# undef short
@@ -843,23 +846,23 @@ union yyalloc
#endif /* !YYCOPY_NEEDED */
/* YYFINAL -- State number of the termination state. */
-#define YYFINAL 384
+#define YYFINAL 386
/* YYLAST -- Last index in YYTABLE. */
-#define YYLAST 9348
+#define YYLAST 9369
/* YYNTOKENS -- Number of terminals. */
-#define YYNTOKENS 408
+#define YYNTOKENS 411
/* YYNNTS -- Number of nonterminals. */
-#define YYNNTS 110
+#define YYNNTS 111
/* YYNRULES -- Number of rules. */
-#define YYNRULES 578
+#define YYNRULES 582
/* YYNSTATES -- Number of states. */
-#define YYNSTATES 722
+#define YYNSTATES 727
/* YYTRANSLATE[YYX] -- Symbol number corresponding to YYX as returned
by yylex, with out-of-bounds checking. */
#define YYUNDEFTOK 2
-#define YYMAXUTOK 662
+#define YYMAXUTOK 665
#define YYTRANSLATE(YYX) \
((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK)
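
Review note: the huge token renumbering above is mechanical — bison assigns values in declaration order, and glslang.y reordered its %token lists (web-supported tokens first), so every constant shifted; nothing should depend on the absolute values. The YYTRANSLATE guard keeps even stale or garbage token codes safe, as in this reduced sketch (table lookup elided):

    #include <cstdio>

    #define YYUNDEFTOK 2
    #define YYMAXUTOK 665

    // Reduced YYTRANSLATE: out-of-range codes collapse to the undefined
    // token instead of indexing past the yytranslate[] table.
    static int translate(unsigned int x) {
        return x <= YYMAXUTOK ? (int)x /* yytranslate[x] in the real parser */ : YYUNDEFTOK;
    }

    int main() {
        std::printf("%d %d\n", translate(300), translate(9000)); // 300, 2
    }
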
@@ -934,71 +937,72 @@ static const yytype_uint16 yytranslate[] =
375, 376, 377, 378, 379, 380, 381, 382, 383, 384,
385, 386, 387, 388, 389, 390, 391, 392, 393, 394,
395, 396, 397, 398, 399, 400, 401, 402, 403, 404,
- 405, 406, 407
+ 405, 406, 407, 408, 409, 410
};
#if YYDEBUG
/* YYRLINE[YYN] -- Source line where rule number YYN was defined. */
static const yytype_uint16 yyrline[] =
{
- 0, 302, 302, 308, 311, 315, 319, 322, 326, 330,
- 334, 338, 342, 345, 349, 353, 356, 364, 367, 370,
- 373, 376, 381, 389, 396, 403, 409, 413, 420, 423,
- 429, 436, 446, 454, 459, 486, 494, 500, 504, 508,
- 528, 529, 530, 531, 537, 538, 543, 548, 557, 558,
- 563, 571, 572, 578, 587, 588, 593, 598, 603, 611,
- 612, 621, 633, 634, 643, 644, 653, 654, 663, 664,
- 672, 673, 681, 682, 690, 691, 691, 709, 710, 726,
- 730, 734, 738, 743, 747, 751, 755, 759, 763, 767,
- 774, 777, 788, 795, 800, 805, 813, 817, 821, 825,
- 830, 835, 844, 844, 855, 859, 866, 873, 876, 883,
- 891, 911, 934, 949, 974, 985, 995, 1005, 1015, 1024,
- 1027, 1031, 1035, 1040, 1048, 1053, 1058, 1063, 1068, 1077,
- 1088, 1115, 1124, 1131, 1138, 1149, 1158, 1168, 1180, 1189,
- 1201, 1207, 1210, 1217, 1221, 1225, 1233, 1242, 1245, 1256,
- 1259, 1262, 1266, 1270, 1274, 1278, 1284, 1288, 1300, 1314,
- 1319, 1325, 1331, 1338, 1344, 1349, 1354, 1359, 1369, 1379,
- 1389, 1399, 1408, 1420, 1424, 1429, 1434, 1439, 1444, 1449,
- 1453, 1457, 1461, 1465, 1471, 1480, 1487, 1490, 1498, 1503,
- 1513, 1518, 1526, 1530, 1540, 1543, 1549, 1555, 1562, 1572,
- 1576, 1580, 1585, 1590, 1595, 1600, 1604, 1609, 1614, 1619,
- 1624, 1629, 1634, 1639, 1644, 1649, 1653, 1658, 1663, 1668,
- 1674, 1680, 1686, 1692, 1698, 1704, 1710, 1716, 1722, 1728,
- 1734, 1740, 1745, 1750, 1755, 1760, 1765, 1770, 1776, 1782,
- 1788, 1794, 1800, 1806, 1812, 1818, 1824, 1830, 1836, 1842,
- 1848, 1854, 1860, 1866, 1872, 1878, 1884, 1890, 1896, 1902,
- 1908, 1914, 1920, 1926, 1932, 1937, 1942, 1947, 1952, 1957,
- 1962, 1967, 1972, 1977, 1982, 1987, 1992, 1998, 2004, 2010,
- 2016, 2022, 2028, 2034, 2040, 2046, 2052, 2058, 2064, 2070,
- 2076, 2082, 2088, 2094, 2100, 2106, 2112, 2118, 2124, 2130,
- 2136, 2142, 2148, 2154, 2160, 2166, 2172, 2178, 2184, 2190,
- 2196, 2202, 2208, 2214, 2220, 2226, 2232, 2238, 2244, 2250,
- 2256, 2262, 2268, 2274, 2280, 2286, 2291, 2296, 2301, 2306,
- 2311, 2316, 2321, 2326, 2331, 2336, 2341, 2346, 2351, 2356,
- 2364, 2372, 2380, 2388, 2396, 2404, 2412, 2420, 2428, 2436,
- 2444, 2452, 2460, 2465, 2470, 2475, 2480, 2485, 2490, 2495,
- 2500, 2505, 2510, 2515, 2520, 2525, 2530, 2535, 2540, 2548,
- 2556, 2561, 2566, 2571, 2579, 2584, 2589, 2594, 2602, 2607,
- 2612, 2617, 2625, 2630, 2635, 2640, 2645, 2650, 2658, 2663,
- 2671, 2676, 2684, 2689, 2697, 2702, 2710, 2715, 2723, 2728,
- 2736, 2741, 2746, 2751, 2756, 2761, 2766, 2771, 2776, 2781,
- 2786, 2791, 2796, 2801, 2806, 2811, 2819, 2824, 2829, 2834,
- 2842, 2847, 2852, 2857, 2865, 2870, 2875, 2880, 2888, 2893,
- 2898, 2903, 2911, 2916, 2921, 2926, 2934, 2939, 2944, 2949,
- 2957, 2962, 2967, 2972, 2980, 2985, 2990, 2995, 3003, 3008,
- 3013, 3018, 3026, 3031, 3036, 3041, 3049, 3054, 3059, 3064,
- 3072, 3077, 3082, 3087, 3095, 3100, 3105, 3110, 3118, 3123,
- 3128, 3133, 3141, 3146, 3151, 3157, 3163, 3169, 3175, 3184,
- 3193, 3199, 3205, 3211, 3217, 3223, 3228, 3244, 3249, 3254,
- 3262, 3262, 3273, 3273, 3283, 3286, 3299, 3321, 3348, 3352,
- 3358, 3363, 3374, 3377, 3383, 3392, 3395, 3401, 3405, 3406,
- 3412, 3413, 3414, 3415, 3416, 3417, 3418, 3422, 3423, 3427,
- 3423, 3439, 3440, 3444, 3444, 3451, 3451, 3465, 3468, 3476,
- 3484, 3495, 3496, 3500, 3503, 3509, 3516, 3520, 3528, 3532,
- 3545, 3548, 3554, 3554, 3574, 3577, 3583, 3595, 3607, 3610,
- 3616, 3616, 3631, 3631, 3647, 3647, 3668, 3671, 3677, 3680,
- 3686, 3690, 3697, 3702, 3707, 3714, 3717, 3726, 3730, 3739,
- 3742, 3745, 3753, 3753, 3775, 3781, 3784, 3789, 3792
+ 0, 352, 352, 358, 361, 366, 369, 372, 376, 380,
+ 384, 388, 392, 396, 400, 404, 408, 416, 419, 422,
+ 425, 428, 433, 441, 448, 455, 461, 465, 472, 475,
+ 481, 488, 498, 506, 511, 539, 548, 554, 558, 562,
+ 582, 583, 584, 585, 591, 592, 597, 602, 611, 612,
+ 617, 625, 626, 632, 641, 642, 647, 652, 657, 665,
+ 666, 675, 687, 688, 697, 698, 707, 708, 717, 718,
+ 726, 727, 735, 736, 744, 745, 745, 763, 764, 780,
+ 784, 788, 792, 797, 801, 805, 809, 813, 817, 821,
+ 828, 831, 842, 849, 854, 859, 866, 870, 874, 878,
+ 883, 888, 897, 897, 908, 912, 919, 926, 929, 936,
+ 944, 964, 987, 1002, 1027, 1038, 1048, 1058, 1068, 1077,
+ 1080, 1084, 1088, 1093, 1101, 1108, 1113, 1118, 1123, 1132,
+ 1142, 1169, 1178, 1185, 1193, 1200, 1207, 1215, 1225, 1232,
+ 1243, 1249, 1252, 1259, 1263, 1267, 1276, 1286, 1289, 1300,
+ 1303, 1306, 1310, 1314, 1319, 1323, 1330, 1334, 1339, 1345,
+ 1351, 1358, 1363, 1371, 1377, 1389, 1403, 1409, 1414, 1422,
+ 1430, 1438, 1446, 1453, 1457, 1462, 1467, 1472, 1477, 1482,
+ 1486, 1490, 1494, 1498, 1504, 1515, 1522, 1525, 1534, 1539,
+ 1549, 1554, 1562, 1566, 1576, 1579, 1585, 1591, 1598, 1608,
+ 1612, 1616, 1620, 1625, 1629, 1634, 1639, 1644, 1649, 1654,
+ 1659, 1664, 1669, 1674, 1680, 1686, 1692, 1697, 1702, 1707,
+ 1712, 1717, 1722, 1727, 1732, 1737, 1742, 1747, 1753, 1758,
+ 1763, 1768, 1773, 1778, 1783, 1788, 1793, 1798, 1803, 1808,
+ 1813, 1819, 1825, 1831, 1837, 1843, 1849, 1855, 1861, 1867,
+ 1873, 1879, 1885, 1891, 1897, 1903, 1909, 1915, 1921, 1927,
+ 1933, 1939, 1945, 1951, 1957, 1963, 1969, 1975, 1981, 1987,
+ 1993, 1999, 2005, 2011, 2017, 2023, 2029, 2035, 2041, 2047,
+ 2053, 2059, 2065, 2071, 2077, 2083, 2089, 2095, 2101, 2107,
+ 2113, 2119, 2125, 2131, 2137, 2143, 2149, 2155, 2161, 2167,
+ 2173, 2179, 2185, 2191, 2197, 2203, 2209, 2215, 2221, 2227,
+ 2233, 2239, 2245, 2251, 2257, 2263, 2269, 2275, 2281, 2287,
+ 2293, 2299, 2305, 2311, 2317, 2321, 2326, 2332, 2337, 2342,
+ 2347, 2352, 2357, 2362, 2368, 2373, 2378, 2383, 2388, 2393,
+ 2399, 2405, 2411, 2417, 2423, 2429, 2435, 2441, 2447, 2453,
+ 2459, 2465, 2471, 2477, 2482, 2487, 2492, 2497, 2502, 2507,
+ 2513, 2518, 2523, 2528, 2533, 2538, 2543, 2548, 2554, 2559,
+ 2564, 2569, 2574, 2579, 2584, 2589, 2594, 2599, 2604, 2609,
+ 2614, 2619, 2624, 2630, 2635, 2640, 2646, 2652, 2657, 2662,
+ 2667, 2673, 2678, 2683, 2688, 2694, 2699, 2704, 2709, 2715,
+ 2720, 2725, 2730, 2736, 2742, 2748, 2754, 2759, 2765, 2771,
+ 2777, 2782, 2787, 2792, 2797, 2802, 2808, 2813, 2818, 2823,
+ 2829, 2834, 2839, 2844, 2850, 2855, 2860, 2865, 2871, 2876,
+ 2881, 2886, 2892, 2897, 2902, 2907, 2913, 2918, 2923, 2928,
+ 2934, 2939, 2944, 2949, 2955, 2960, 2965, 2970, 2976, 2981,
+ 2986, 2991, 2997, 3002, 3007, 3012, 3018, 3023, 3028, 3033,
+ 3039, 3044, 3049, 3054, 3060, 3065, 3070, 3075, 3081, 3086,
+ 3091, 3096, 3102, 3107, 3112, 3118, 3124, 3130, 3136, 3143,
+ 3150, 3156, 3162, 3168, 3174, 3180, 3186, 3193, 3198, 3214,
+ 3219, 3224, 3232, 3232, 3243, 3243, 3253, 3256, 3269, 3291,
+ 3318, 3322, 3328, 3333, 3344, 3348, 3354, 3365, 3368, 3375,
+ 3379, 3380, 3386, 3387, 3388, 3389, 3390, 3391, 3392, 3394,
+ 3400, 3409, 3410, 3414, 3410, 3426, 3427, 3431, 3431, 3438,
+ 3438, 3452, 3455, 3463, 3471, 3482, 3483, 3487, 3491, 3498,
+ 3505, 3509, 3517, 3521, 3534, 3538, 3545, 3545, 3565, 3568,
+ 3574, 3586, 3598, 3602, 3609, 3609, 3624, 3624, 3640, 3640,
+ 3661, 3664, 3670, 3673, 3679, 3683, 3690, 3695, 3700, 3707,
+ 3710, 3719, 3723, 3732, 3735, 3739, 3748, 3748, 3771, 3777,
+ 3780, 3785, 3788
};
#endif
@@ -1007,69 +1011,49 @@ static const yytype_uint16 yyrline[] =
First, the terminals, then, starting at YYNTOKENS, nonterminals. */
static const char *const yytname[] =
{
- "$end", "error", "$undefined", "ATTRIBUTE", "VARYING", "FLOAT16_T",
- "FLOAT", "FLOAT32_T", "DOUBLE", "FLOAT64_T", "CONST", "BOOL", "INT",
- "UINT", "INT64_T", "UINT64_T", "INT32_T", "UINT32_T", "INT16_T",
- "UINT16_T", "INT8_T", "UINT8_T", "BREAK", "CONTINUE", "DO", "ELSE",
- "FOR", "IF", "DISCARD", "RETURN", "SWITCH", "CASE", "DEFAULT",
- "SUBROUTINE", "BVEC2", "BVEC3", "BVEC4", "IVEC2", "IVEC3", "IVEC4",
- "UVEC2", "UVEC3", "UVEC4", "I64VEC2", "I64VEC3", "I64VEC4", "U64VEC2",
- "U64VEC3", "U64VEC4", "I32VEC2", "I32VEC3", "I32VEC4", "U32VEC2",
- "U32VEC3", "U32VEC4", "I16VEC2", "I16VEC3", "I16VEC4", "U16VEC2",
- "U16VEC3", "U16VEC4", "I8VEC2", "I8VEC3", "I8VEC4", "U8VEC2", "U8VEC3",
- "U8VEC4", "VEC2", "VEC3", "VEC4", "MAT2", "MAT3", "MAT4", "CENTROID",
- "IN", "OUT", "INOUT", "UNIFORM", "PATCH", "SAMPLE", "BUFFER", "SHARED",
- "NONUNIFORM", "PAYLOADNV", "PAYLOADINNV", "HITATTRNV", "CALLDATANV",
- "CALLDATAINNV", "COHERENT", "VOLATILE", "RESTRICT", "READONLY",
- "WRITEONLY", "DEVICECOHERENT", "QUEUEFAMILYCOHERENT",
- "WORKGROUPCOHERENT", "SUBGROUPCOHERENT", "NONPRIVATE", "DVEC2", "DVEC3",
- "DVEC4", "DMAT2", "DMAT3", "DMAT4", "F16VEC2", "F16VEC3", "F16VEC4",
- "F16MAT2", "F16MAT3", "F16MAT4", "F32VEC2", "F32VEC3", "F32VEC4",
- "F32MAT2", "F32MAT3", "F32MAT4", "F64VEC2", "F64VEC3", "F64VEC4",
- "F64MAT2", "F64MAT3", "F64MAT4", "NOPERSPECTIVE", "FLAT", "SMOOTH",
- "LAYOUT", "EXPLICITINTERPAMD", "PERVERTEXNV", "PERPRIMITIVENV",
- "PERVIEWNV", "PERTASKNV", "MAT2X2", "MAT2X3", "MAT2X4", "MAT3X2",
- "MAT3X3", "MAT3X4", "MAT4X2", "MAT4X3", "MAT4X4", "DMAT2X2", "DMAT2X3",
- "DMAT2X4", "DMAT3X2", "DMAT3X3", "DMAT3X4", "DMAT4X2", "DMAT4X3",
- "DMAT4X4", "F16MAT2X2", "F16MAT2X3", "F16MAT2X4", "F16MAT3X2",
- "F16MAT3X3", "F16MAT3X4", "F16MAT4X2", "F16MAT4X3", "F16MAT4X4",
- "F32MAT2X2", "F32MAT2X3", "F32MAT2X4", "F32MAT3X2", "F32MAT3X3",
- "F32MAT3X4", "F32MAT4X2", "F32MAT4X3", "F32MAT4X4", "F64MAT2X2",
- "F64MAT2X3", "F64MAT2X4", "F64MAT3X2", "F64MAT3X3", "F64MAT3X4",
- "F64MAT4X2", "F64MAT4X3", "F64MAT4X4", "ATOMIC_UINT", "ACCSTRUCTNV",
- "FCOOPMATNV", "SAMPLER1D", "SAMPLER2D", "SAMPLER3D", "SAMPLERCUBE",
- "SAMPLER1DSHADOW", "SAMPLER2DSHADOW", "SAMPLERCUBESHADOW",
- "SAMPLER1DARRAY", "SAMPLER2DARRAY", "SAMPLER1DARRAYSHADOW",
- "SAMPLER2DARRAYSHADOW", "ISAMPLER1D", "ISAMPLER2D", "ISAMPLER3D",
- "ISAMPLERCUBE", "ISAMPLER1DARRAY", "ISAMPLER2DARRAY", "USAMPLER1D",
- "USAMPLER2D", "USAMPLER3D", "USAMPLERCUBE", "USAMPLER1DARRAY",
- "USAMPLER2DARRAY", "SAMPLER2DRECT", "SAMPLER2DRECTSHADOW",
- "ISAMPLER2DRECT", "USAMPLER2DRECT", "SAMPLERBUFFER", "ISAMPLERBUFFER",
- "USAMPLERBUFFER", "SAMPLERCUBEARRAY", "SAMPLERCUBEARRAYSHADOW",
- "ISAMPLERCUBEARRAY", "USAMPLERCUBEARRAY", "SAMPLER2DMS", "ISAMPLER2DMS",
- "USAMPLER2DMS", "SAMPLER2DMSARRAY", "ISAMPLER2DMSARRAY",
+ "$end", "error", "$undefined", "CONST", "BOOL", "INT", "UINT", "FLOAT",
+ "BVEC2", "BVEC3", "BVEC4", "IVEC2", "IVEC3", "IVEC4", "UVEC2", "UVEC3",
+ "UVEC4", "VEC2", "VEC3", "VEC4", "MAT2", "MAT3", "MAT4", "MAT2X2",
+ "MAT2X3", "MAT2X4", "MAT3X2", "MAT3X3", "MAT3X4", "MAT4X2", "MAT4X3",
+ "MAT4X4", "SAMPLER2D", "SAMPLER3D", "SAMPLERCUBE", "SAMPLER2DSHADOW",
+ "SAMPLERCUBESHADOW", "SAMPLER2DARRAY", "SAMPLER2DARRAYSHADOW",
+ "ISAMPLER2D", "ISAMPLER3D", "ISAMPLERCUBE", "ISAMPLER2DARRAY",
+ "USAMPLER2D", "USAMPLER3D", "USAMPLERCUBE", "USAMPLER2DARRAY", "SAMPLER",
+ "SAMPLERSHADOW", "TEXTURE2D", "TEXTURE3D", "TEXTURECUBE",
+ "TEXTURE2DARRAY", "ITEXTURE2D", "ITEXTURE3D", "ITEXTURECUBE",
+ "ITEXTURE2DARRAY", "UTEXTURE2D", "UTEXTURE3D", "UTEXTURECUBE",
+ "UTEXTURE2DARRAY", "ATTRIBUTE", "VARYING", "FLOAT16_T", "FLOAT32_T",
+ "DOUBLE", "FLOAT64_T", "INT64_T", "UINT64_T", "INT32_T", "UINT32_T",
+ "INT16_T", "UINT16_T", "INT8_T", "UINT8_T", "I64VEC2", "I64VEC3",
+ "I64VEC4", "U64VEC2", "U64VEC3", "U64VEC4", "I32VEC2", "I32VEC3",
+ "I32VEC4", "U32VEC2", "U32VEC3", "U32VEC4", "I16VEC2", "I16VEC3",
+ "I16VEC4", "U16VEC2", "U16VEC3", "U16VEC4", "I8VEC2", "I8VEC3", "I8VEC4",
+ "U8VEC2", "U8VEC3", "U8VEC4", "DVEC2", "DVEC3", "DVEC4", "DMAT2",
+ "DMAT3", "DMAT4", "F16VEC2", "F16VEC3", "F16VEC4", "F16MAT2", "F16MAT3",
+ "F16MAT4", "F32VEC2", "F32VEC3", "F32VEC4", "F32MAT2", "F32MAT3",
+ "F32MAT4", "F64VEC2", "F64VEC3", "F64VEC4", "F64MAT2", "F64MAT3",
+ "F64MAT4", "DMAT2X2", "DMAT2X3", "DMAT2X4", "DMAT3X2", "DMAT3X3",
+ "DMAT3X4", "DMAT4X2", "DMAT4X3", "DMAT4X4", "F16MAT2X2", "F16MAT2X3",
+ "F16MAT2X4", "F16MAT3X2", "F16MAT3X3", "F16MAT3X4", "F16MAT4X2",
+ "F16MAT4X3", "F16MAT4X4", "F32MAT2X2", "F32MAT2X3", "F32MAT2X4",
+ "F32MAT3X2", "F32MAT3X3", "F32MAT3X4", "F32MAT4X2", "F32MAT4X3",
+ "F32MAT4X4", "F64MAT2X2", "F64MAT2X3", "F64MAT2X4", "F64MAT3X2",
+ "F64MAT3X3", "F64MAT3X4", "F64MAT4X2", "F64MAT4X3", "F64MAT4X4",
+ "ATOMIC_UINT", "ACCSTRUCTNV", "FCOOPMATNV", "ICOOPMATNV", "UCOOPMATNV",
+ "SAMPLERCUBEARRAY", "SAMPLERCUBEARRAYSHADOW", "ISAMPLERCUBEARRAY",
+ "USAMPLERCUBEARRAY", "SAMPLER1D", "SAMPLER1DARRAY",
+ "SAMPLER1DARRAYSHADOW", "ISAMPLER1D", "SAMPLER1DSHADOW", "SAMPLER2DRECT",
+ "SAMPLER2DRECTSHADOW", "ISAMPLER2DRECT", "USAMPLER2DRECT",
+ "SAMPLERBUFFER", "ISAMPLERBUFFER", "USAMPLERBUFFER", "SAMPLER2DMS",
+ "ISAMPLER2DMS", "USAMPLER2DMS", "SAMPLER2DMSARRAY", "ISAMPLER2DMSARRAY",
"USAMPLER2DMSARRAY", "SAMPLEREXTERNALOES", "SAMPLEREXTERNAL2DY2YEXT",
- "F16SAMPLER1D", "F16SAMPLER2D", "F16SAMPLER3D", "F16SAMPLER2DRECT",
- "F16SAMPLERCUBE", "F16SAMPLER1DARRAY", "F16SAMPLER2DARRAY",
- "F16SAMPLERCUBEARRAY", "F16SAMPLERBUFFER", "F16SAMPLER2DMS",
- "F16SAMPLER2DMSARRAY", "F16SAMPLER1DSHADOW", "F16SAMPLER2DSHADOW",
- "F16SAMPLER1DARRAYSHADOW", "F16SAMPLER2DARRAYSHADOW",
- "F16SAMPLER2DRECTSHADOW", "F16SAMPLERCUBESHADOW",
- "F16SAMPLERCUBEARRAYSHADOW", "SAMPLER", "SAMPLERSHADOW", "TEXTURE1D",
- "TEXTURE2D", "TEXTURE3D", "TEXTURECUBE", "TEXTURE1DARRAY",
- "TEXTURE2DARRAY", "ITEXTURE1D", "ITEXTURE2D", "ITEXTURE3D",
- "ITEXTURECUBE", "ITEXTURE1DARRAY", "ITEXTURE2DARRAY", "UTEXTURE1D",
- "UTEXTURE2D", "UTEXTURE3D", "UTEXTURECUBE", "UTEXTURE1DARRAY",
- "UTEXTURE2DARRAY", "TEXTURE2DRECT", "ITEXTURE2DRECT", "UTEXTURE2DRECT",
- "TEXTUREBUFFER", "ITEXTUREBUFFER", "UTEXTUREBUFFER", "TEXTURECUBEARRAY",
- "ITEXTURECUBEARRAY", "UTEXTURECUBEARRAY", "TEXTURE2DMS", "ITEXTURE2DMS",
- "UTEXTURE2DMS", "TEXTURE2DMSARRAY", "ITEXTURE2DMSARRAY",
- "UTEXTURE2DMSARRAY", "F16TEXTURE1D", "F16TEXTURE2D", "F16TEXTURE3D",
- "F16TEXTURE2DRECT", "F16TEXTURECUBE", "F16TEXTURE1DARRAY",
- "F16TEXTURE2DARRAY", "F16TEXTURECUBEARRAY", "F16TEXTUREBUFFER",
- "F16TEXTURE2DMS", "F16TEXTURE2DMSARRAY", "SUBPASSINPUT",
- "SUBPASSINPUTMS", "ISUBPASSINPUT", "ISUBPASSINPUTMS", "USUBPASSINPUT",
- "USUBPASSINPUTMS", "F16SUBPASSINPUT", "F16SUBPASSINPUTMS", "IMAGE1D",
+ "ISAMPLER1DARRAY", "USAMPLER1D", "USAMPLER1DARRAY", "F16SAMPLER1D",
+ "F16SAMPLER2D", "F16SAMPLER3D", "F16SAMPLER2DRECT", "F16SAMPLERCUBE",
+ "F16SAMPLER1DARRAY", "F16SAMPLER2DARRAY", "F16SAMPLERCUBEARRAY",
+ "F16SAMPLERBUFFER", "F16SAMPLER2DMS", "F16SAMPLER2DMSARRAY",
+ "F16SAMPLER1DSHADOW", "F16SAMPLER2DSHADOW", "F16SAMPLER1DARRAYSHADOW",
+ "F16SAMPLER2DARRAYSHADOW", "F16SAMPLER2DRECTSHADOW",
+ "F16SAMPLERCUBESHADOW", "F16SAMPLERCUBEARRAYSHADOW", "IMAGE1D",
"IIMAGE1D", "UIMAGE1D", "IMAGE2D", "IIMAGE2D", "UIMAGE2D", "IMAGE3D",
"IIMAGE3D", "UIMAGE3D", "IMAGE2DRECT", "IIMAGE2DRECT", "UIMAGE2DRECT",
"IMAGECUBE", "IIMAGECUBE", "UIMAGECUBE", "IMAGEBUFFER", "IIMAGEBUFFER",
@@ -1080,11 +1064,18 @@ static const char *const yytname[] =
"F16IMAGE1D", "F16IMAGE2D", "F16IMAGE3D", "F16IMAGE2DRECT",
"F16IMAGECUBE", "F16IMAGE1DARRAY", "F16IMAGE2DARRAY",
"F16IMAGECUBEARRAY", "F16IMAGEBUFFER", "F16IMAGE2DMS",
- "F16IMAGE2DMSARRAY", "STRUCT", "VOID", "WHILE", "IDENTIFIER",
- "TYPE_NAME", "FLOATCONSTANT", "DOUBLECONSTANT", "INT16CONSTANT",
- "UINT16CONSTANT", "INT32CONSTANT", "UINT32CONSTANT", "INTCONSTANT",
- "UINTCONSTANT", "INT64CONSTANT", "UINT64CONSTANT", "BOOLCONSTANT",
- "FLOAT16CONSTANT", "LEFT_OP", "RIGHT_OP", "INC_OP", "DEC_OP", "LE_OP",
+ "F16IMAGE2DMSARRAY", "TEXTURECUBEARRAY", "ITEXTURECUBEARRAY",
+ "UTEXTURECUBEARRAY", "TEXTURE1D", "ITEXTURE1D", "UTEXTURE1D",
+ "TEXTURE1DARRAY", "ITEXTURE1DARRAY", "UTEXTURE1DARRAY", "TEXTURE2DRECT",
+ "ITEXTURE2DRECT", "UTEXTURE2DRECT", "TEXTUREBUFFER", "ITEXTUREBUFFER",
+ "UTEXTUREBUFFER", "TEXTURE2DMS", "ITEXTURE2DMS", "UTEXTURE2DMS",
+ "TEXTURE2DMSARRAY", "ITEXTURE2DMSARRAY", "UTEXTURE2DMSARRAY",
+ "F16TEXTURE1D", "F16TEXTURE2D", "F16TEXTURE3D", "F16TEXTURE2DRECT",
+ "F16TEXTURECUBE", "F16TEXTURE1DARRAY", "F16TEXTURE2DARRAY",
+ "F16TEXTURECUBEARRAY", "F16TEXTUREBUFFER", "F16TEXTURE2DMS",
+ "F16TEXTURE2DMSARRAY", "SUBPASSINPUT", "SUBPASSINPUTMS", "ISUBPASSINPUT",
+ "ISUBPASSINPUTMS", "USUBPASSINPUT", "USUBPASSINPUTMS", "F16SUBPASSINPUT",
+ "F16SUBPASSINPUTMS", "LEFT_OP", "RIGHT_OP", "INC_OP", "DEC_OP", "LE_OP",
"GE_OP", "EQ_OP", "NE_OP", "AND_OP", "OR_OP", "XOR_OP", "MUL_ASSIGN",
"DIV_ASSIGN", "ADD_ASSIGN", "MOD_ASSIGN", "LEFT_ASSIGN", "RIGHT_ASSIGN",
"AND_ASSIGN", "XOR_ASSIGN", "OR_ASSIGN", "SUB_ASSIGN", "LEFT_PAREN",
@@ -1092,11 +1083,24 @@ static const char *const yytname[] =
"RIGHT_BRACE", "DOT", "COMMA", "COLON", "EQUAL", "SEMICOLON", "BANG",
"DASH", "TILDE", "PLUS", "STAR", "SLASH", "PERCENT", "LEFT_ANGLE",
"RIGHT_ANGLE", "VERTICAL_BAR", "CARET", "AMPERSAND", "QUESTION",
- "INVARIANT", "PRECISE", "HIGH_PRECISION", "MEDIUM_PRECISION",
- "LOW_PRECISION", "PRECISION", "PACKED", "RESOURCE", "SUPERP", "$accept",
- "variable_identifier", "primary_expression", "postfix_expression",
- "integer_expression", "function_call", "function_call_or_method",
- "function_call_generic", "function_call_header_no_parameters",
+ "INVARIANT", "HIGH_PRECISION", "MEDIUM_PRECISION", "LOW_PRECISION",
+ "PRECISION", "PACKED", "RESOURCE", "SUPERP", "FLOATCONSTANT",
+ "INTCONSTANT", "UINTCONSTANT", "BOOLCONSTANT", "IDENTIFIER", "TYPE_NAME",
+ "CENTROID", "IN", "OUT", "INOUT", "STRUCT", "VOID", "WHILE", "BREAK",
+ "CONTINUE", "DO", "ELSE", "FOR", "IF", "DISCARD", "RETURN", "SWITCH",
+ "CASE", "DEFAULT", "UNIFORM", "SHARED", "BUFFER", "FLAT", "SMOOTH",
+ "LAYOUT", "DOUBLECONSTANT", "INT16CONSTANT", "UINT16CONSTANT",
+ "FLOAT16CONSTANT", "INT32CONSTANT", "UINT32CONSTANT", "INT64CONSTANT",
+ "UINT64CONSTANT", "SUBROUTINE", "DEMOTE", "PAYLOADNV", "PAYLOADINNV",
+ "HITATTRNV", "CALLDATANV", "CALLDATAINNV", "PATCH", "SAMPLE",
+ "NONUNIFORM", "COHERENT", "VOLATILE", "RESTRICT", "READONLY",
+ "WRITEONLY", "DEVICECOHERENT", "QUEUEFAMILYCOHERENT",
+ "WORKGROUPCOHERENT", "SUBGROUPCOHERENT", "NONPRIVATE", "NOPERSPECTIVE",
+ "EXPLICITINTERPAMD", "PERVERTEXNV", "PERPRIMITIVENV", "PERVIEWNV",
+ "PERTASKNV", "PRECISE", "$accept", "variable_identifier",
+ "primary_expression", "postfix_expression", "integer_expression",
+ "function_call", "function_call_or_method", "function_call_generic",
+ "function_call_header_no_parameters",
"function_call_header_with_parameters", "function_call_header",
"function_identifier", "unary_expression", "unary_operator",
"multiplicative_expression", "additive_expression", "shift_expression",
@@ -1120,8 +1124,8 @@ static const char *const yytname[] =
"$@3", "$@4", "struct_declaration_list", "struct_declaration",
"struct_declarator_list", "struct_declarator", "initializer",
"initializer_list", "declaration_statement", "statement",
- "simple_statement", "compound_statement", "$@5", "$@6",
- "statement_no_new_scope", "statement_scoped", "$@7", "$@8",
+ "simple_statement", "demote_statement", "compound_statement", "$@5",
+ "$@6", "statement_no_new_scope", "statement_scoped", "$@7", "$@8",
"compound_statement_no_new_scope", "statement_list",
"expression_statement", "selection_statement",
"selection_statement_nonattributed", "selection_rest_statement",
@@ -1180,16 +1184,17 @@ static const yytype_uint16 yytoknum[] =
625, 626, 627, 628, 629, 630, 631, 632, 633, 634,
635, 636, 637, 638, 639, 640, 641, 642, 643, 644,
645, 646, 647, 648, 649, 650, 651, 652, 653, 654,
- 655, 656, 657, 658, 659, 660, 661, 662
+ 655, 656, 657, 658, 659, 660, 661, 662, 663, 664,
+ 665
};
# endif
-#define YYPACT_NINF -659
+#define YYPACT_NINF -453
#define yypact_value_is_default(Yystate) \
- (!!((Yystate) == (-659)))
+ (!!((Yystate) == (-453)))
-#define YYTABLE_NINF -524
+#define YYTABLE_NINF -528
#define yytable_value_is_error(Yytable_value) \
0
@@ -1198,79 +1203,79 @@ static const yytype_uint16 yytoknum[] =
STATE-NUM. */
static const yytype_int16 yypact[] =
{
- 3535, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -331, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -324, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -319, -659, -659, -659, -659, -659,
- -659, -659, -659, -256, -659, -314, -351, -309, -306, 5942,
- -257, -659, -217, -659, -659, -659, -659, 4338, -659, -659,
- -659, -659, -241, -659, -659, 721, -659, -659, -204, -71,
- -219, -659, 9007, -349, -659, -659, -215, -659, 5942, -659,
- -659, -659, 5942, -178, -172, -659, -337, -267, -659, -659,
- -659, 8237, -207, -659, -659, -659, -659, -341, -659, -211,
- -330, -659, -659, 5942, -210, 6697, -659, -322, 1123, -659,
- -659, -659, -659, -207, -328, -659, 7082, -304, -659, -163,
- -659, -252, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -659, -659, -659, 8237, 8237, 8237, -659, -659,
- -659, -659, -659, -659, -303, -659, -659, -659, -196, -299,
- 8622, -194, -659, 8237, -659, -659, -355, -195, -659, -157,
- 8237, -659, -71, 5942, 5942, -155, 4739, -659, -659, -659,
- -659, -242, -236, -249, -335, -206, -191, -187, -209, -149,
- -150, -333, -162, 7467, -659, -170, -168, -659, -154, -153,
- -167, 7852, -152, 8237, -159, -148, -151, -160, -659, -659,
- -274, -659, -659, -251, -659, -351, -147, -144, -659, -659,
- -659, -659, 1525, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -19, -195, 7082, -302, 7082, -659, -659, 7082,
- 5942, -659, -115, -659, -659, -659, -292, -659, -659, 8237,
- -108, -659, -659, 8237, -143, -659, -659, -659, 8237, -659,
- -659, -659, -659, -659, 5140, -155, -207, -250, -659, -659,
- -659, 8237, 8237, 8237, 8237, 8237, 8237, 8237, 8237, 8237,
- 8237, 8237, 8237, 8237, 8237, 8237, 8237, 8237, 8237, 8237,
- -659, -659, -659, -142, -659, -659, 1927, -659, 8237, -659,
- -659, -245, 8237, -226, -659, -659, -106, -659, 1927, -659,
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- 8237, 8237, -659, -659, -659, -659, -659, -659, -659, 7082,
- -659, -238, -659, 5541, -659, -659, -141, -140, -659, -659,
- -659, -659, -244, -195, -155, -659, -659, -659, -659, -242,
- -242, -236, -236, -249, -249, -249, -249, -335, -335, -206,
- -191, -187, -209, -149, -150, 8237, -659, -104, 3133, -263,
- -659, -260, -659, 3937, -136, -297, -659, 1927, -659, -659,
- -659, -659, 6312, -659, -659, -659, -659, -224, -135, -659,
- -659, 3937, -138, -659, -140, -97, 5942, -132, 8237, -133,
- -106, -134, -659, -659, 8237, 8237, -659, -137, -129, 224,
- -128, 2731, -659, -127, -131, 2329, -126, -659, -659, -659,
- -659, -255, 8237, 2329, -138, -659, -659, 1927, 7082, -659,
- -659, -659, -659, -130, -140, -659, -659, 1927, -123, -659,
- -659, -659
+ 3994, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, 97, -453, -453, -453,
+ -453, -453, 6, -453, -453, -453, -453, -453, -453, -307,
+ -241, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -3, 95, 36,
+ 125, 6034, 82, -453, -22, -453, -453, -453, -453, 4402,
+ -453, -453, -453, -453, 131, -453, -453, 730, -453, -453,
+ 11, -453, 153, -28, 127, -453, 7, -453, 157, -453,
+ 6034, -453, -453, -453, 6034, 129, 134, -453, 13, -453,
+ 73, -453, -453, 8391, 162, -453, -453, -453, 161, 6034,
+ -453, 163, -453, -309, -453, -453, 27, 6831, -453, 16,
+ 1138, -453, -453, -453, -453, 162, 23, -453, 7221, 49,
+ -453, 138, -453, 87, 8391, 8391, 8391, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, 68, -453, -453, -453,
+ 174, 60, 8781, 176, -453, 8391, -453, -453, -320, 175,
+ -453, 6034, 142, 4810, -453, 6034, 8391, -453, -28, -453,
+ 143, -453, -453, 119, 128, 32, 21, 38, 158, 160,
+ 165, 195, 194, 18, 183, 7611, -453, 185, 184, -453,
+ -453, 188, 180, 181, -453, 196, 197, 190, 8001, 198,
+ 8391, 187, 193, 122, -453, -453, 91, -453, 95, 204,
+ 205, -453, -453, -453, -453, -453, 1546, -453, -453, -453,
+ -453, -453, -453, -453, -453, -453, -353, 175, 7221, 69,
+ 7221, -453, -453, 7221, 6034, -453, 170, -453, -453, -453,
+ 78, -453, -453, 8391, 171, -453, -453, 8391, 207, -453,
+ -453, -453, 8391, -453, 142, 162, 93, -453, -453, -453,
+ 5218, -453, -453, -453, -453, 8391, 8391, 8391, 8391, 8391,
+ 8391, 8391, 8391, 8391, 8391, 8391, 8391, 8391, 8391, 8391,
+ 8391, 8391, 8391, 8391, -453, -453, -453, 206, 177, -453,
+ 1954, -453, -453, -453, 1954, -453, 8391, -453, -453, 100,
+ 8391, 144, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, -453, -453, -453, 8391, 8391, -453, -453, -453,
+ -453, -453, -453, -453, 7221, -453, 140, -453, 5626, -453,
+ -453, 209, 208, -453, -453, -453, 123, 175, 142, -453,
+ -453, -453, -453, -453, 119, 119, 128, 128, 32, 32,
+ 32, 32, 21, 21, 38, 158, 160, 165, 195, 194,
+ 8391, -453, 214, 56, -453, 1954, 3586, 172, 3178, 80,
+ -453, 81, -453, -453, -453, -453, -453, 6441, -453, -453,
+ -453, -453, 146, 8391, 215, 177, 212, 208, 186, 6034,
+ 219, 221, -453, -453, 3586, 220, -453, -453, -453, 8391,
+ 222, -453, -453, -453, 216, 2362, 8391, -453, 217, 227,
+ 182, 225, 2770, -453, 229, -453, -453, 7221, -453, -453,
+ -453, 89, 8391, 2362, 220, -453, -453, 1954, -453, 224,
+ 208, -453, -453, 1954, 226, -453, -453
};
/* YYDEFACT[STATE-NUM] -- Default reduction number in state STATE-NUM.
@@ -1278,111 +1283,113 @@ static const yytype_int16 yypact[] =
means the default is an error. */
static const yytype_uint16 yydefact[] =
{
- 0, 157, 158, 202, 200, 203, 201, 204, 156, 215,
- 205, 206, 213, 214, 211, 212, 209, 210, 207, 208,
- 183, 231, 232, 233, 234, 235, 236, 249, 250, 251,
- 246, 247, 248, 261, 262, 263, 243, 244, 245, 258,
- 259, 260, 240, 241, 242, 255, 256, 257, 237, 238,
- 239, 252, 253, 254, 216, 217, 218, 264, 265, 266,
- 162, 160, 161, 159, 165, 163, 164, 166, 172, 185,
- 168, 169, 167, 170, 171, 173, 179, 180, 181, 182,
- 174, 175, 176, 177, 178, 219, 220, 221, 276, 277,
- 278, 222, 223, 224, 288, 289, 290, 225, 226, 227,
- 300, 301, 302, 228, 229, 230, 312, 313, 314, 134,
- 133, 132, 0, 135, 136, 137, 138, 139, 267, 268,
- 269, 270, 271, 272, 273, 274, 275, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 315, 316, 317, 318, 319, 320,
- 321, 322, 323, 325, 324, 484, 326, 327, 328, 329,
- 330, 331, 332, 333, 334, 335, 336, 352, 353, 354,
- 355, 356, 357, 359, 360, 361, 362, 363, 364, 366,
- 367, 370, 371, 372, 374, 375, 337, 338, 358, 365,
- 376, 378, 379, 380, 382, 383, 474, 475, 339, 340,
- 341, 368, 342, 346, 347, 350, 373, 377, 381, 343,
- 344, 348, 349, 369, 345, 351, 384, 385, 386, 388,
- 390, 392, 394, 396, 400, 401, 402, 403, 404, 405,
- 407, 408, 409, 410, 411, 412, 414, 416, 417, 418,
- 420, 421, 398, 406, 413, 422, 424, 425, 426, 428,
- 429, 387, 389, 391, 415, 393, 395, 397, 399, 419,
- 423, 427, 476, 477, 480, 481, 482, 483, 478, 479,
- 430, 432, 433, 434, 436, 437, 438, 440, 441, 442,
- 444, 445, 446, 448, 449, 450, 452, 453, 454, 456,
- 457, 458, 460, 461, 462, 464, 465, 466, 468, 469,
- 470, 472, 473, 431, 435, 439, 443, 447, 455, 459,
- 463, 451, 467, 471, 0, 199, 486, 571, 131, 146,
- 487, 488, 489, 0, 570, 0, 572, 0, 108, 107,
- 0, 119, 124, 153, 152, 150, 154, 0, 147, 149,
- 155, 129, 195, 151, 485, 0, 567, 569, 0, 0,
- 0, 492, 0, 0, 96, 93, 0, 106, 0, 115,
- 109, 117, 0, 118, 0, 94, 125, 0, 99, 148,
- 130, 0, 188, 194, 1, 568, 186, 0, 145, 143,
- 0, 141, 490, 0, 0, 0, 97, 0, 0, 573,
- 110, 114, 116, 112, 120, 111, 0, 126, 102, 0,
- 100, 0, 2, 12, 13, 10, 11, 4, 5, 6,
- 7, 8, 9, 15, 14, 0, 0, 0, 42, 41,
- 43, 40, 3, 17, 36, 19, 24, 25, 0, 0,
- 29, 0, 197, 0, 35, 33, 0, 189, 184, 0,
- 0, 140, 0, 0, 0, 0, 0, 494, 95, 190,
- 44, 48, 51, 54, 59, 62, 64, 66, 68, 70,
- 72, 74, 0, 0, 98, 0, 0, 552, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 518, 527, 531,
- 44, 77, 90, 0, 507, 0, 155, 129, 510, 529,
- 509, 508, 0, 511, 512, 533, 513, 540, 514, 515,
- 548, 516, 0, 113, 0, 121, 0, 502, 128, 0,
- 0, 104, 0, 101, 37, 38, 0, 21, 22, 0,
- 0, 27, 26, 0, 199, 30, 32, 39, 0, 196,
- 187, 92, 144, 142, 0, 0, 500, 0, 498, 493,
- 495, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 156, 203, 201, 202, 200, 207, 208, 209, 210,
+ 211, 212, 213, 214, 215, 204, 205, 206, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 327, 328, 329, 330, 331, 332, 333, 353, 354, 355,
+ 356, 357, 358, 359, 368, 381, 382, 369, 370, 372,
+ 371, 373, 374, 375, 376, 377, 378, 379, 380, 164,
+ 165, 229, 230, 228, 231, 238, 239, 236, 237, 234,
+ 235, 232, 233, 261, 262, 263, 273, 274, 275, 258,
+ 259, 260, 270, 271, 272, 255, 256, 257, 267, 268,
+ 269, 252, 253, 254, 264, 265, 266, 240, 241, 242,
+ 276, 277, 278, 243, 244, 245, 288, 289, 290, 246,
+ 247, 248, 300, 301, 302, 249, 250, 251, 312, 313,
+ 314, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 291, 292, 293, 294, 295, 296, 297, 298, 299, 303,
+ 304, 305, 306, 307, 308, 309, 310, 311, 315, 316,
+ 317, 318, 319, 320, 321, 322, 323, 325, 324, 484,
+ 485, 486, 337, 338, 361, 364, 326, 335, 336, 352,
+ 334, 383, 384, 387, 388, 389, 391, 392, 393, 395,
+ 396, 397, 399, 400, 474, 475, 360, 362, 363, 339,
+ 340, 341, 385, 342, 346, 347, 350, 390, 394, 398,
+ 343, 344, 348, 349, 386, 345, 351, 430, 432, 433,
+ 434, 436, 437, 438, 440, 441, 442, 444, 445, 446,
+ 448, 449, 450, 452, 453, 454, 456, 457, 458, 460,
+ 461, 462, 464, 465, 466, 468, 469, 470, 472, 473,
+ 431, 435, 439, 443, 447, 455, 459, 463, 451, 467,
+ 471, 365, 366, 367, 401, 410, 412, 406, 411, 413,
+ 414, 416, 417, 418, 420, 421, 422, 424, 425, 426,
+ 428, 429, 402, 403, 404, 415, 405, 407, 408, 409,
+ 419, 423, 427, 476, 477, 480, 481, 482, 483, 478,
+ 479, 575, 131, 489, 490, 491, 0, 488, 160, 158,
+ 159, 157, 0, 199, 161, 162, 163, 133, 132, 0,
+ 183, 169, 170, 168, 171, 172, 166, 167, 185, 173,
+ 179, 180, 181, 182, 174, 175, 176, 177, 178, 134,
+ 135, 136, 137, 138, 139, 146, 574, 0, 576, 0,
+ 108, 107, 0, 119, 124, 153, 152, 150, 154, 0,
+ 147, 149, 155, 129, 195, 151, 487, 0, 571, 573,
+ 0, 494, 0, 0, 0, 96, 0, 93, 0, 106,
+ 0, 115, 109, 117, 0, 118, 0, 94, 125, 99,
+ 0, 148, 130, 0, 188, 194, 1, 572, 0, 0,
+ 492, 143, 145, 0, 141, 186, 0, 0, 97, 0,
+ 0, 577, 110, 114, 116, 112, 120, 111, 0, 126,
+ 102, 0, 100, 0, 0, 0, 0, 42, 41, 43,
+ 40, 5, 6, 7, 8, 2, 15, 13, 14, 16,
+ 9, 10, 11, 12, 3, 17, 36, 19, 24, 25,
+ 0, 0, 29, 0, 197, 0, 35, 33, 0, 189,
+ 95, 0, 0, 0, 496, 0, 0, 140, 0, 184,
+ 0, 190, 44, 48, 51, 54, 59, 62, 64, 66,
+ 68, 70, 72, 74, 0, 0, 98, 0, 522, 531,
+ 535, 0, 0, 0, 556, 0, 0, 0, 0, 0,
+ 0, 0, 0, 44, 77, 90, 0, 509, 0, 155,
+ 129, 512, 533, 511, 519, 510, 0, 513, 514, 537,
+ 515, 544, 516, 517, 552, 518, 0, 113, 0, 121,
+ 0, 504, 128, 0, 0, 104, 0, 101, 37, 38,
+ 0, 21, 22, 0, 0, 27, 26, 0, 199, 30,
+ 32, 39, 0, 196, 0, 502, 0, 500, 495, 497,
+ 0, 92, 144, 142, 187, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 75, 191, 192, 0, 563, 562, 0, 554, 0, 566,
- 564, 0, 0, 0, 547, 550, 0, 517, 0, 80,
- 81, 83, 82, 85, 86, 87, 88, 89, 84, 79,
- 0, 0, 532, 528, 530, 534, 541, 549, 123, 0,
- 505, 0, 127, 0, 105, 16, 0, 23, 20, 31,
- 198, 491, 0, 501, 0, 496, 45, 46, 47, 50,
- 49, 52, 53, 57, 58, 55, 56, 60, 61, 63,
- 65, 67, 69, 71, 73, 0, 193, 0, 0, 0,
- 565, 0, 546, 0, 577, 0, 575, 519, 78, 91,
- 122, 503, 0, 103, 18, 497, 499, 0, 0, 557,
- 556, 559, 525, 542, 538, 0, 0, 0, 0, 0,
- 0, 0, 504, 506, 0, 0, 558, 0, 0, 537,
- 0, 0, 535, 0, 0, 0, 0, 574, 576, 520,
- 76, 0, 560, 0, 525, 524, 526, 544, 0, 522,
- 551, 521, 578, 0, 561, 555, 536, 545, 0, 539,
- 553, 543
+ 0, 0, 0, 0, 75, 191, 192, 0, 0, 521,
+ 0, 554, 567, 566, 0, 558, 0, 570, 568, 0,
+ 0, 0, 551, 520, 80, 81, 83, 82, 85, 86,
+ 87, 88, 89, 84, 79, 0, 0, 536, 532, 534,
+ 538, 545, 553, 123, 0, 507, 0, 127, 0, 105,
+ 4, 0, 23, 20, 31, 198, 0, 503, 0, 498,
+ 493, 45, 46, 47, 50, 49, 52, 53, 57, 58,
+ 55, 56, 60, 61, 63, 65, 67, 69, 71, 73,
+ 0, 193, 581, 0, 579, 523, 0, 0, 0, 0,
+ 569, 0, 550, 78, 91, 122, 505, 0, 103, 18,
+ 499, 501, 0, 0, 0, 0, 0, 542, 0, 0,
+ 0, 0, 561, 560, 563, 529, 546, 506, 508, 0,
+ 0, 578, 580, 524, 0, 0, 0, 562, 0, 0,
+ 541, 0, 0, 539, 0, 76, 582, 0, 526, 555,
+ 525, 0, 564, 0, 529, 528, 530, 548, 543, 0,
+ 565, 559, 540, 549, 0, 557, 547
};
/* YYPGOTO[NTERM-NUM]. */
static const yytype_int16 yypgoto[] =
{
- -659, -659, -659, -659, -659, -659, -659, -659, -659, -659,
- -659, -659, -364, -659, -389, -385, -457, -384, -310, -307,
- -305, -308, -301, -298, -659, -386, -659, -390, -659, -415,
- -418, 1, -659, -659, -659, 2, -659, -659, -659, -110,
- -105, -107, -659, -659, -628, -659, -659, -659, -659, -188,
- -659, -336, -343, -659, 6, -659, 0, -334, -659, -659,
- -659, -659, -67, -659, -659, -659, -431, -437, -277, -350,
- -501, -659, -375, -488, -658, -414, -659, -659, -428, -426,
- -659, -659, -87, -568, -368, -659, -231, -659, -388, -659,
- -230, -659, -659, -659, -659, -228, -659, -659, -659, -659,
- -659, -659, -659, -659, -70, -659, -659, -659, -659, -394
+ -453, -453, -453, -453, -453, -453, -453, -453, -453, -453,
+ -453, -453, 8696, -453, -89, -88, -122, -84, -19, -18,
+ -17, -16, -20, -15, -453, -85, -453, -98, -453, -110,
+ -119, 2, -453, -453, -453, 4, -453, -453, -453, 189,
+ 191, 192, -453, -453, -339, -453, -453, -453, -453, 98,
+ -453, -37, -44, -453, 9, -453, 0, -71, -453, -453,
+ -453, -453, 261, -453, -453, -453, -452, -137, 20, -68,
+ -209, -453, -96, -198, -326, -453, -136, -453, -453, -146,
+ -144, -453, -453, 200, -265, -87, -453, 57, -453, -112,
+ -453, 59, -453, -453, -453, -453, 61, -453, -453, -453,
+ -453, -453, -453, -453, -453, 228, -453, -453, -453, -453,
+ -99
};
/* YYDEFGOTO[NTERM-NUM]. */
static const yytype_int16 yydefgoto[] =
{
- -1, 432, 433, 434, 616, 435, 436, 437, 438, 439,
- 440, 441, 490, 443, 461, 462, 463, 464, 465, 466,
- 467, 468, 469, 470, 471, 491, 645, 492, 600, 493,
- 542, 494, 335, 520, 411, 495, 337, 338, 339, 369,
- 370, 371, 340, 341, 342, 343, 344, 345, 390, 391,
- 346, 347, 348, 349, 444, 387, 445, 397, 382, 383,
- 446, 352, 353, 354, 453, 393, 456, 457, 547, 548,
- 518, 611, 498, 499, 500, 501, 588, 681, 710, 689,
- 690, 691, 711, 502, 503, 504, 505, 692, 677, 506,
- 507, 693, 718, 508, 509, 510, 653, 576, 648, 671,
- 687, 688, 511, 355, 356, 357, 366, 512, 655, 656
+ -1, 434, 435, 436, 621, 437, 438, 439, 440, 441,
+ 442, 443, 493, 445, 463, 464, 465, 466, 467, 468,
+ 469, 470, 471, 472, 473, 494, 650, 495, 605, 496,
+ 552, 497, 337, 524, 413, 498, 339, 340, 341, 371,
+ 372, 373, 342, 343, 344, 345, 346, 347, 393, 394,
+ 348, 349, 350, 351, 446, 396, 447, 399, 384, 385,
+ 448, 354, 355, 356, 455, 389, 453, 454, 546, 547,
+ 522, 616, 501, 502, 503, 504, 505, 580, 676, 709,
+ 700, 701, 702, 710, 506, 507, 508, 509, 703, 680,
+ 510, 511, 704, 724, 512, 513, 514, 656, 584, 658,
+ 684, 698, 699, 515, 357, 358, 359, 368, 516, 653,
+ 654
};
/* YYTABLE[YYPACT[STATE-NUM]] -- What to do in state STATE-NUM. If
@@ -1390,82 +1397,245 @@ static const yytype_int16 yydefgoto[] =
number is the opposite. If YYTABLE_NINF, syntax error. */
static const yytype_int16 yytable[] =
{
- 351, 334, 336, 372, 379, 477, 350, 478, 479, 472,
- 388, 482, 526, 608, 604, 610, 517, 442, 612, 550,
- 657, 360, 544, 558, 559, 675, 363, 538, 395, 379,
- 569, 460, 372, 706, 365, 448, 396, 709, 405, 539,
- 395, 449, 407, 675, 358, 709, 451, 406, 447, 395,
- 535, 359, 452, 527, 528, 473, 514, 454, 560, 561,
- 361, 524, 525, 474, 541, 570, 581, 367, 583, 513,
- 515, 364, -34, 473, 529, 473, 368, 532, 530, 537,
- 519, 679, 609, 533, 615, 680, 460, 573, 647, 613,
- 601, 589, 590, 591, 592, 593, 594, 595, 596, 597,
- 598, 633, 634, 635, 636, 556, 557, 550, 660, 460,
- 599, 379, 408, 672, 617, 409, 673, 454, 410, 601,
- 454, 713, 601, 376, 517, 374, 517, 601, 375, 517,
- 522, 601, 624, 523, 602, 625, 386, 601, 624, 717,
- 650, 665, 661, 619, 662, 330, 331, 332, 551, 552,
- 553, 554, 381, 555, 562, 563, 601, 652, 601, 684,
- 392, 683, 403, 649, 398, 629, 630, 651, 404, 604,
- 395, 631, 632, 450, 620, 458, 550, 521, 637, 638,
- 531, 536, 473, 540, 454, 546, 566, 626, 627, 628,
- 460, 460, 460, 460, 460, 460, 460, 460, 460, 460,
- 460, 460, 460, 460, 460, 460, 564, 719, 454, 565,
- 658, 659, 623, 567, 568, 574, 571, 575, 579, 517,
- 587, 577, 578, 582, 584, 614, 586, 585, -35, 604,
- 667, -33, 618, -28, 654, 668, 646, 664, 674, 678,
- 685, -523, 601, 694, 695, 697, 699, 703, 702, 704,
- 712, 487, 707, 708, 639, 720, 674, 721, 640, 642,
- 696, 641, 401, 400, 543, 402, 362, 643, 622, 389,
- 701, 644, 517, 669, 666, 715, 705, 454, 716, 399,
- 670, 605, 606, 686, 607, 385, 698, 714, 0, 0,
- 0, 0, 541, 0, 700, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 460, 0, 0, 676, 517, 0,
- 485, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 379, 0, 676, 0, 0, 0, 373,
- 0, 0, 0, 0, 0, 350, 0, 380, 0, 0,
- 0, 0, 0, 350, 0, 351, 334, 336, 0, 0,
- 0, 350, 394, 0, 0, 0, 0, 0, 373, 0,
- 0, 0, 373, 0, 350, 0, 0, 0, 350, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 455, 0, 0, 0, 0, 497, 350,
- 0, 0, 0, 0, 496, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 455, 545, 0, 455, 0, 0, 350,
- 350, 0, 350, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 497, 0, 0, 0, 0, 0, 496, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 455, 0, 0, 0, 0, 0, 350, 0, 0, 0,
+ 353, 542, 336, 550, 338, 481, 457, 363, 484, 352,
+ 485, 486, 458, 543, 489, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+ 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+ 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+ 57, 58, 618, 364, 61, 62, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
+ 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
+ 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
+ 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
+ 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
+ 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
+ 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
+ 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
+ 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
+ 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
+ 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
+ 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
+ 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
+ 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
+ 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
+ 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
+ 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
+ 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
+ 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
+ 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
+ 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
+ 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
+ 287, 288, 289, 290, 374, 381, 530, 409, 609, 613,
+ 521, 615, 474, 449, 617, 655, 549, 678, 562, 563,
+ 573, 365, 391, 397, 361, 560, 561, 407, 378, 397,
+ 381, 398, 475, 374, 517, 519, 408, 566, 567, 397,
+ 476, 375, 459, 392, 539, 678, 518, 366, 460, 382,
+ 352, 369, 451, 564, 565, 574, 362, 353, 352, 336,
+ 388, 338, 297, 531, 532, 475, 352, 302, 303, 708,
+ 375, 551, 523, 674, 375, 536, 716, 675, 589, 352,
+ 591, 537, -34, 352, 533, 475, 657, 708, 534, 452,
+ 577, 410, 614, 620, 411, 685, 686, 412, 352, 606,
+ 500, 606, 606, 376, 719, 665, 377, 381, 526, 499,
+ 606, 527, 606, 549, 628, 607, 451, 629, 451, 367,
+ 521, 606, 521, 622, 660, 521, 594, 595, 596, 597,
+ 598, 599, 600, 601, 602, 603, 293, 294, 295, 624,
+ 638, 639, 640, 641, 628, 604, 370, 670, 555, 556,
+ 557, 544, 723, 452, 558, 452, 559, 609, 688, 666,
+ 352, 667, 352, 383, 352, 606, 662, 606, 689, 634,
+ 635, 390, 636, 637, 627, 400, 659, 395, 397, 405,
+ 661, 549, 642, 643, 406, 450, 456, 451, 525, 535,
+ 540, 475, 545, 554, 568, 569, 571, 572, 718, 570,
+ 575, 578, 581, 579, 582, 583, 500, 663, 664, 592,
+ 585, 586, 590, 451, 587, 499, 521, 593, -35, -33,
+ 619, 623, -28, 651, 452, 609, 669, 652, 673, 606,
+ 681, 693, 691, 352, 695, 696, 694, 706, -527, 707,
+ 672, 712, 713, 478, 714, 726, 677, 717, 725, 644,
+ 452, 645, 648, 646, 690, 647, 553, 360, 649, 352,
+ 671, 402, 682, 403, 626, 715, 404, 721, 401, 521,
+ 722, 683, 697, 610, 677, 611, 692, 612, 0, 0,
+ 500, 451, 0, 0, 500, 387, 711, 0, 551, 499,
+ 0, 705, 0, 499, 0, 0, 0, 0, 0, 0,
+ 0, 0, 720, 0, 0, 0, 0, 0, 0, 521,
+ 0, 0, 0, 0, 0, 0, 0, 0, 452, 679,
+ 0, 0, 0, 0, 0, 0, 0, 352, 0, 0,
+ 0, 0, 0, 0, 0, 381, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 679, 0, 0,
+ 0, 0, 0, 0, 0, 500, 500, 0, 500, 0,
+ 0, 0, 0, 0, 499, 499, 0, 499, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 382,
+ 0, 0, 0, 0, 500, 0, 0, 0, 352, 0,
+ 0, 0, 0, 499, 0, 500, 0, 0, 0, 0,
+ 0, 0, 500, 0, 499, 0, 0, 0, 0, 0,
+ 0, 499, 0, 500, 0, 0, 0, 500, 0, 0,
+ 0, 0, 499, 500, 0, 0, 499, 0, 0, 0,
+ 386, 0, 499, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 455, 0, 0, 0, 0, 0,
- 350, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 497, 0, 0, 0,
- 0, 0, 496, 0, 0, 0, 0, 0, 497, 0,
- 0, 0, 0, 0, 496, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 291, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 292, 293,
+ 294, 295, 296, 0, 0, 0, 0, 0, 0, 0,
+ 0, 297, 298, 299, 300, 301, 302, 303, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 455, 0, 0, 0, 0, 0, 350,
+ 304, 305, 306, 307, 308, 309, 0, 0, 0, 0,
+ 0, 0, 0, 0, 310, 0, 311, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, 327, 328, 329, 330, 331, 332, 333, 334,
+ 335, 1, 2, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+ 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
+ 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+ 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
+ 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+ 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
+ 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
+ 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
+ 290, 0, 0, 414, 415, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 416, 0, 477, 0, 478, 479, 0, 0,
+ 0, 0, 480, 417, 418, 419, 420, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 292, 293, 294, 295,
+ 296, 0, 0, 0, 421, 422, 423, 424, 425, 297,
+ 298, 299, 300, 301, 302, 303, 481, 482, 483, 484,
+ 0, 485, 486, 487, 488, 489, 490, 491, 304, 305,
+ 306, 307, 308, 309, 426, 427, 428, 429, 430, 431,
+ 432, 433, 310, 492, 311, 312, 313, 314, 315, 316,
+ 317, 318, 319, 320, 321, 322, 323, 324, 325, 326,
+ 327, 328, 329, 330, 331, 332, 333, 334, 335, 1,
+ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
+ 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
+ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
+ 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
+ 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
+ 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
+ 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+ 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
+ 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
+ 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
+ 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
+ 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
+ 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
+ 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
+ 282, 283, 284, 285, 286, 287, 288, 289, 290, 0,
+ 0, 414, 415, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 497, 0,
- 0, 0, 0, 497, 496, 0, 0, 497, 0, 496,
- 0, 0, 0, 496, 0, 0, 0, 0, 0, 0,
- 0, 497, 0, 0, 0, 0, 380, 496, 0, 0,
- 0, 0, 350, 0, 0, 0, 0, 0, 0, 0,
- 0, 497, 0, 0, 0, 497, 0, 496, 0, 0,
- 0, 496, 0, 497, 0, 0, 0, 497, 0, 496,
- 0, 0, 0, 496, 0, 0, 0, 497, 0, 0,
- 0, 384, 0, 496, 1, 2, 3, 4, 5, 6,
- 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
- 17, 18, 19, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 20, 21, 22, 23, 24, 25,
+ 416, 0, 477, 0, 478, 608, 0, 0, 0, 0,
+ 480, 417, 418, 419, 420, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 292, 293, 294, 295, 296, 0,
+ 0, 0, 421, 422, 423, 424, 425, 297, 298, 299,
+ 300, 301, 302, 303, 481, 482, 483, 484, 0, 485,
+ 486, 487, 488, 489, 490, 491, 304, 305, 306, 307,
+ 308, 309, 426, 427, 428, 429, 430, 431, 432, 433,
+ 310, 492, 311, 312, 313, 314, 315, 316, 317, 318,
+ 319, 320, 321, 322, 323, 324, 325, 326, 327, 328,
+ 329, 330, 331, 332, 333, 334, 335, 1, 2, 3,
+ 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
+ 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+ 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
+ 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
+ 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
+ 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
+ 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
+ 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
+ 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
+ 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
+ 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
+ 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
+ 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
+ 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
+ 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
+ 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
+ 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
+ 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
+ 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
+ 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
+ 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
+ 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
+ 284, 285, 286, 287, 288, 289, 290, 0, 0, 414,
+ 415, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 416, 0,
+ 477, 0, 478, 0, 0, 0, 0, 0, 480, 417,
+ 418, 419, 420, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 292, 293, 294, 295, 296, 0, 0, 0,
+ 421, 422, 423, 424, 425, 297, 298, 299, 300, 301,
+ 302, 303, 481, 482, 483, 484, 0, 485, 486, 487,
+ 488, 489, 490, 491, 304, 305, 306, 307, 308, 309,
+ 426, 427, 428, 429, 430, 431, 432, 433, 310, 492,
+ 311, 312, 313, 314, 315, 316, 317, 318, 319, 320,
+ 321, 322, 323, 324, 325, 326, 327, 328, 329, 330,
+ 331, 332, 333, 334, 335, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
@@ -1492,20 +1662,143 @@ static const yytype_int16 yytable[] =
256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, 317, 318, 319, 320, 321, 322, 323, 324, 325,
- 0, 0, 326, 0, 0, 0, 0, 0, 0, 0,
+ 286, 287, 288, 289, 290, 0, 0, 414, 415, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 416, 0, 477, 0,
+ 400, 0, 0, 0, 0, 0, 480, 417, 418, 419,
+ 420, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 292, 293, 294, 295, 296, 0, 0, 0, 421, 422,
+ 423, 424, 425, 297, 298, 299, 300, 301, 302, 303,
+ 481, 482, 483, 484, 0, 485, 486, 487, 488, 489,
+ 490, 491, 304, 305, 306, 307, 308, 309, 426, 427,
+ 428, 429, 430, 431, 432, 433, 310, 492, 311, 312,
+ 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
+ 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
+ 333, 334, 335, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 0, 0, 414, 415, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 416, 0, 477, 0, 0, 0,
+ 0, 0, 0, 0, 480, 417, 418, 419, 420, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 292, 293,
+ 294, 295, 296, 0, 0, 0, 421, 422, 423, 424,
+ 425, 297, 298, 299, 300, 301, 302, 303, 481, 482,
+ 483, 484, 0, 485, 486, 487, 488, 489, 490, 491,
+ 304, 305, 306, 307, 308, 309, 426, 427, 428, 429,
+ 430, 431, 432, 433, 310, 492, 311, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, 327, 328, 329, 330, 331, 332, 333, 334,
+ 335, 1, 2, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
+ 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+ 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+ 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
+ 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
+ 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
+ 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
+ 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
+ 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
+ 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
+ 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
+ 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
+ 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
+ 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
+ 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
+ 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
+ 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
+ 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
+ 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
+ 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
+ 290, 0, 0, 414, 415, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 327, 0, 0, 0,
+ 0, 0, 416, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 480, 417, 418, 419, 420, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 292, 293, 294, 295,
+ 296, 0, 0, 0, 421, 422, 423, 424, 425, 297,
+ 298, 299, 300, 301, 302, 303, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 304, 305,
+ 306, 307, 308, 309, 426, 427, 428, 429, 430, 431,
+ 432, 433, 310, 0, 311, 312, 313, 314, 315, 316,
+ 317, 318, 319, 320, 321, 322, 323, 324, 325, 326,
+ 327, 328, 329, 330, 331, 332, 333, 334, 335, 1,
+ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
+ 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
+ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
+ 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
+ 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
+ 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
+ 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+ 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
+ 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
+ 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
+ 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
+ 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
+ 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
+ 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
+ 282, 283, 284, 285, 286, 287, 288, 289, 290, 0,
+ 0, 414, 415, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 328, 329, 330, 331, 332, 333, 1, 2, 3, 4,
- 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
- 15, 16, 17, 18, 19, 475, 476, 477, 0, 478,
- 479, 480, 481, 482, 483, 484, 20, 21, 22, 23,
+ 416, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 417, 418, 419, 420, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 292, 293, 294, 295, 0, 0,
+ 0, 0, 421, 422, 423, 424, 425, 297, 298, 299,
+ 300, 301, 302, 303, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 304, 305, 306, 307,
+ 308, 309, 426, 427, 428, 429, 430, 431, 432, 433,
+ 310, 0, 311, 312, 313, 314, 315, 316, 317, 318,
+ 319, 320, 321, 322, 323, 324, 325, 326, 327, 328,
+ 329, 330, 331, 332, 333, 334, 335, 1, 2, 3,
+ 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
@@ -1532,141 +1825,21 @@ static const yytype_int16 yytable[] =
254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
- 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
- 294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 315, 316, 317, 318, 319, 320, 321, 322, 323,
- 324, 325, 485, 412, 326, 413, 414, 415, 416, 417,
- 418, 419, 420, 421, 422, 423, 424, 0, 0, 425,
- 426, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 427, 0,
- 486, 0, 487, 488, 0, 0, 0, 0, 489, 428,
- 429, 430, 431, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 328, 329, 330, 331, 332, 333, 1, 2,
- 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 475, 476, 477,
- 0, 478, 479, 480, 481, 482, 483, 484, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 315, 316, 317, 318, 319, 320, 321,
- 322, 323, 324, 325, 485, 412, 326, 413, 414, 415,
- 416, 417, 418, 419, 420, 421, 422, 423, 424, 0,
- 0, 425, 426, 0, 0, 0, 0, 0, 0, 0,
+ 284, 285, 286, 287, 288, 289, 290, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 427, 0, 486, 0, 487, 603, 0, 0, 0, 0,
- 489, 428, 429, 430, 431, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 328, 329, 330, 331, 332, 333,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 475,
- 476, 477, 0, 478, 479, 480, 481, 482, 483, 484,
- 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
- 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
- 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
- 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
- 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
- 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
- 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
- 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
- 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
- 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
- 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
- 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
- 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
- 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
- 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
- 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
- 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
- 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
- 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
- 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
- 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
- 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
- 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
- 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 325, 485, 412, 326, 413,
- 414, 415, 416, 417, 418, 419, 420, 421, 422, 423,
- 424, 0, 0, 425, 426, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 427, 0, 486, 0, 487, 0, 0, 0,
- 0, 0, 489, 428, 429, 430, 431, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 328, 329, 330, 331,
- 332, 333, 1, 2, 3, 4, 5, 6, 7, 8,
- 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
- 19, 475, 476, 477, 0, 478, 479, 480, 481, 482,
- 483, 484, 20, 21, 22, 23, 24, 25, 26, 27,
- 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
- 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
- 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
- 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 485, 412,
- 326, 413, 414, 415, 416, 417, 418, 419, 420, 421,
- 422, 423, 424, 0, 0, 425, 426, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 291, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 427, 0, 486, 0, 398, 0,
- 0, 0, 0, 0, 489, 428, 429, 430, 431, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 328, 329,
- 330, 331, 332, 333, 1, 2, 3, 4, 5, 6,
- 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
- 17, 18, 19, 475, 476, 477, 0, 478, 479, 480,
- 481, 482, 483, 484, 20, 21, 22, 23, 24, 25,
+ 0, 0, 292, 293, 294, 295, 296, 0, 0, 0,
+ 0, 0, 0, 0, 0, 297, 298, 299, 300, 301,
+ 302, 303, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 304, 305, 306, 307, 308, 309,
+ 0, 0, 0, 0, 0, 0, 0, 0, 310, 0,
+ 311, 312, 313, 314, 315, 316, 317, 318, 319, 320,
+ 321, 322, 323, 324, 325, 326, 327, 328, 329, 330,
+ 331, 332, 333, 334, 335, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
@@ -1693,100 +1866,61 @@ static const yytype_int16 yytable[] =
256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, 317, 318, 319, 320, 321, 322, 323, 324, 325,
- 485, 412, 326, 413, 414, 415, 416, 417, 418, 419,
- 420, 421, 422, 423, 424, 0, 0, 425, 426, 0,
+ 286, 287, 288, 289, 290, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 427, 0, 486, 0,
- 0, 0, 0, 0, 0, 0, 489, 428, 429, 430,
- 431, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 328, 329, 330, 331, 332, 333, 1, 2, 3, 4,
- 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
- 15, 16, 17, 18, 19, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 20, 21, 22, 23,
- 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
- 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
- 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
- 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
- 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
- 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
- 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
- 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
- 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
- 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
- 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
- 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
- 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
- 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
- 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
- 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
- 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
- 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
- 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
- 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
- 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
- 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
- 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
- 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
- 294, 295, 296, 297, 298, 299, 300, 301, 302, 303,
- 304, 305, 306, 307, 308, 309, 310, 311, 312, 313,
- 314, 315, 316, 317, 318, 319, 320, 321, 322, 323,
- 324, 325, 0, 412, 326, 413, 414, 415, 416, 417,
- 418, 419, 420, 421, 422, 423, 424, 0, 0, 425,
- 426, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 427, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 489, 428,
- 429, 430, 431, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 328, 329, 330, 331, 332, 333, 1, 2,
- 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 315, 316, 317, 318, 319, 320, 321,
- 322, 323, 324, 325, 0, 0, 326, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 379, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 292, 293, 294, 295, 0, 0, 0, 0, 0, 0,
+ 0, 0, 380, 297, 298, 299, 300, 301, 302, 303,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 304, 305, 306, 307, 308, 309, 0, 0,
+ 0, 0, 0, 0, 0, 0, 310, 0, 311, 312,
+ 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
+ 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
+ 333, 334, 335, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 548,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 327, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 328, 329, 330, 331, 332, 333,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 292, 293,
+ 294, 295, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 297, 298, 299, 300, 301, 302, 303, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 304, 305, 306, 307, 308, 309, 0, 0, 0, 0,
+ 0, 0, 0, 0, 310, 0, 311, 312, 313, 314,
+ 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
+ 325, 326, 327, 328, 329, 330, 331, 332, 333, 334,
+ 335, 1, 2, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
@@ -1814,104 +1948,106 @@ static const yytype_int16 yytable[] =
260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 325, 0, 412, 326, 413,
- 414, 415, 416, 417, 418, 419, 420, 421, 422, 423,
- 424, 0, 0, 425, 426, 0, 0, 0, 0, 0,
+ 290, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 427, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 428, 429, 430, 431, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 328, 329, 330, 331,
- 332, 1, 2, 3, 4, 5, 6, 7, 8, 9,
- 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 0, 0, 0, 0, 0, 0, 0, 630, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 20, 21, 22, 23, 24, 25, 26, 27, 28,
- 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
- 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
- 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
- 59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
- 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
- 79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
- 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
- 99, 100, 101, 102, 103, 104, 105, 106, 107, 108,
- 109, 110, 111, 112, 113, 114, 115, 116, 117, 118,
- 119, 120, 121, 122, 123, 124, 125, 126, 127, 128,
- 129, 130, 131, 132, 133, 134, 135, 136, 137, 138,
- 139, 140, 141, 142, 143, 144, 145, 146, 147, 148,
- 149, 150, 151, 152, 153, 154, 155, 156, 157, 158,
- 159, 160, 161, 162, 163, 164, 165, 166, 167, 168,
- 169, 170, 171, 172, 173, 174, 175, 176, 177, 178,
- 179, 180, 181, 182, 183, 184, 185, 186, 187, 188,
- 189, 190, 191, 192, 193, 194, 195, 196, 197, 198,
- 199, 200, 201, 202, 203, 204, 205, 206, 207, 208,
- 209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
- 219, 220, 221, 222, 223, 224, 225, 226, 227, 228,
- 229, 230, 231, 232, 233, 234, 235, 236, 237, 238,
- 239, 240, 241, 242, 243, 244, 245, 246, 247, 248,
- 249, 250, 251, 252, 253, 254, 255, 256, 257, 258,
- 259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
- 269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
- 279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
- 289, 290, 291, 292, 293, 294, 295, 296, 297, 298,
- 299, 300, 301, 302, 303, 304, 305, 306, 307, 308,
- 309, 310, 311, 312, 313, 314, 315, 316, 317, 318,
- 319, 320, 321, 322, 323, 324, 325, 0, 377, 326,
+ 0, 0, 0, 0, 0, 0, 292, 293, 294, 295,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 297,
+ 298, 299, 300, 301, 302, 303, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 304, 305,
+ 306, 307, 308, 309, 0, 0, 0, 0, 0, 0,
+ 0, 0, 310, 0, 311, 312, 313, 314, 315, 316,
+ 317, 318, 319, 320, 321, 322, 323, 324, 325, 326,
+ 327, 328, 329, 330, 331, 332, 333, 334, 335, 1,
+ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
+ 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
+ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
+ 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
+ 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
+ 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
+ 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+ 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
+ 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
+ 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
+ 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
+ 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
+ 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
+ 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
+ 282, 283, 284, 285, 286, 287, 288, 289, 290, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 668, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 292, 293, 294, 295, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 297, 298, 299,
+ 300, 301, 302, 303, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 304, 305, 306, 307,
+ 308, 309, 0, 0, 0, 0, 0, 0, 0, 0,
+ 310, 0, 311, 312, 313, 314, 315, 316, 317, 318,
+ 319, 320, 321, 322, 323, 324, 325, 326, 327, 328,
+ 329, 330, 331, 332, 333, 334, 335, 1, 2, 3,
+ 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
+ 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+ 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
+ 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
+ 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
+ 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
+ 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
+ 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
+ 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
+ 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
+ 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
+ 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
+ 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
+ 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
+ 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
+ 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
+ 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
+ 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
+ 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
+ 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
+ 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
+ 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
+ 284, 285, 286, 287, 288, 289, 290, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 378, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 328, 329, 330,
- 331, 332, 1, 2, 3, 4, 5, 6, 7, 8,
- 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
- 19, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 20, 21, 22, 23, 24, 25, 26, 27,
- 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
- 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
- 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
- 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 0, 0,
- 326, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 549,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 328, 329,
- 330, 331, 332, 1, 2, 3, 4, 5, 6, 7,
- 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 20, 21, 22, 23, 24, 25, 26,
+ 0, 0, 292, 293, 294, 295, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 297, 298, 299, 300, 301,
+ 302, 303, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 304, 305, 306, 307, 308, 309,
+ 0, 0, 0, 0, 0, 0, 0, 0, 310, 0,
+ 311, 312, 313, 314, 315, 316, 317, 318, 319, 320,
+ 321, 322, 323, 324, 325, 326, 327, 328, 329, 330,
+ 331, 332, 333, 334, 335, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
- 57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
+ 57, 58, 0, 0, 61, 62, 63, 64, 65, 66,
67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
@@ -1934,222 +2070,359 @@ static const yytype_int16 yytable[] =
257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
- 287, 288, 289, 290, 291, 292, 293, 294, 295, 296,
- 297, 298, 299, 300, 301, 302, 303, 304, 305, 306,
- 307, 308, 309, 310, 311, 312, 313, 314, 315, 316,
- 317, 318, 319, 320, 321, 322, 323, 324, 325, 0,
- 0, 326, 0, 0, 0, 0, 0, 0, 0, 0,
+ 287, 288, 289, 290, 0, 0, 414, 415, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 416, 0, 0, 0, 520,
+ 687, 0, 0, 0, 0, 0, 417, 418, 419, 420,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 421, 422, 423,
+ 424, 425, 297, 0, 0, 0, 0, 302, 303, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 621, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 328,
- 329, 330, 331, 332, 1, 2, 3, 4, 5, 6,
+ 0, 0, 0, 0, 0, 0, 0, 426, 427, 428,
+ 429, 430, 431, 432, 433, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 318, 2, 3, 4, 5, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
- 17, 18, 19, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 20, 21, 22, 23, 24, 25,
- 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
- 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
- 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
- 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
- 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
- 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
- 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
- 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
- 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
- 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
- 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
- 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
- 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
- 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
- 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
- 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
- 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
- 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
- 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
- 286, 287, 288, 289, 290, 291, 292, 293, 294, 295,
- 296, 297, 298, 299, 300, 301, 302, 303, 304, 305,
- 306, 307, 308, 309, 310, 311, 312, 313, 314, 315,
- 316, 317, 318, 319, 320, 321, 322, 323, 324, 325,
- 0, 0, 326, 0, 0, 0, 0, 0, 0, 0,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+ 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+ 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+ 57, 58, 0, 0, 61, 62, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
+ 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
+ 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
+ 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
+ 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
+ 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
+ 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
+ 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
+ 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
+ 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
+ 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
+ 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
+ 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
+ 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
+ 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
+ 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
+ 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
+ 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
+ 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
+ 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
+ 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
+ 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
+ 287, 288, 289, 290, 0, 0, 414, 415, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 416, 0, 0, 461, 0,
+ 0, 0, 0, 0, 0, 0, 417, 418, 419, 420,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 421, 422, 423,
+ 424, 425, 297, 0, 0, 0, 0, 302, 303, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 663, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 426, 427, 428,
+ 429, 430, 431, 432, 433, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 318, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+ 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+ 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+ 57, 58, 0, 0, 61, 62, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
+ 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
+ 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
+ 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
+ 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
+ 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
+ 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
+ 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
+ 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
+ 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
+ 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
+ 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
+ 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
+ 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
+ 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
+ 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
+ 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
+ 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
+ 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
+ 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
+ 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
+ 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
+ 287, 288, 289, 290, 0, 0, 414, 415, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 328, 329, 330, 331, 332, 1, 2, 3, 4, 5,
- 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 20, 21, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
- 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
- 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
- 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
- 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
- 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
- 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
- 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
- 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
- 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
- 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
- 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
- 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
- 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
- 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
- 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
- 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
- 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
- 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
- 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
- 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
- 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
- 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
- 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
- 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
- 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
- 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
- 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
- 325, 0, 0, 326, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 416, 0, 0, 0, 520,
+ 0, 0, 0, 0, 0, 0, 417, 418, 419, 420,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 421, 422, 423,
+ 424, 425, 297, 0, 0, 0, 0, 302, 303, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 3, 4, 5,
- 6, 7, 0, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 0, 0, 0, 0, 0, 0,
- 0, 328, 329, 330, 331, 332, 21, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
- 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
- 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
- 55, 56, 57, 58, 59, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 69, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 426, 427, 428,
+ 429, 430, 431, 432, 433, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 318, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+ 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+ 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+ 57, 58, 0, 0, 61, 62, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
+ 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
+ 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
+ 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
+ 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
+ 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
+ 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
+ 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
+ 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
+ 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
+ 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
+ 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
+ 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
+ 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
+ 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
+ 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
+ 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
+ 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
+ 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
+ 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
+ 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
+ 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
+ 287, 288, 289, 290, 0, 0, 414, 415, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
- 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
- 105, 106, 107, 108, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 118, 119, 120, 121, 122, 123, 124,
- 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
- 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
- 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
- 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
- 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
- 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
- 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
- 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
- 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
- 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
- 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
- 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
- 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
- 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
- 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
- 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
- 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
- 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
- 325, 0, 412, 326, 413, 414, 415, 416, 417, 418,
- 419, 420, 421, 422, 423, 424, 0, 0, 425, 426,
+ 0, 0, 0, 0, 0, 416, 0, 0, 576, 0,
+ 0, 0, 0, 0, 0, 0, 417, 418, 419, 420,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 427, 0, 0,
- 0, 516, 682, 0, 0, 0, 0, 0, 428, 429,
- 430, 431, 3, 4, 5, 6, 7, 0, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 0,
+ 0, 0, 0, 0, 0, 0, 0, 421, 422, 423,
+ 424, 425, 297, 0, 0, 0, 0, 302, 303, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 21, 22, 23, 24, 25, 26, 27, 28, 29,
- 30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
- 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
- 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 69,
+ 0, 0, 0, 0, 0, 0, 0, 426, 427, 428,
+ 429, 430, 431, 432, 433, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 318, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+ 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+ 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+ 57, 58, 0, 0, 61, 62, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
+ 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
+ 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
+ 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
+ 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
+ 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
+ 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
+ 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
+ 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
+ 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
+ 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
+ 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
+ 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
+ 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
+ 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
+ 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
+ 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
+ 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
+ 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
+ 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
+ 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
+ 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
+ 287, 288, 289, 290, 0, 0, 414, 415, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 85, 86, 87, 88, 89,
- 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 118, 119,
- 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
- 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
- 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
- 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
- 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
- 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
- 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
- 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
- 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
- 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
- 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
- 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
- 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
- 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
- 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
- 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 325, 0, 412, 326, 413,
- 414, 415, 416, 417, 418, 419, 420, 421, 422, 423,
- 424, 0, 0, 425, 426, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 416, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 588, 417, 418, 419, 420,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 427, 0, 0, 459, 0, 0, 0, 0,
- 0, 0, 0, 428, 429, 430, 431, 3, 4, 5,
- 6, 7, 0, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 21, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
- 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
- 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
- 55, 56, 57, 58, 59, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 69, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 421, 422, 423,
+ 424, 425, 297, 0, 0, 0, 0, 302, 303, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
- 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
- 105, 106, 107, 108, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 118, 119, 120, 121, 122, 123, 124,
- 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
- 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
- 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
- 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
- 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
- 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
- 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
- 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
- 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
- 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
- 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
- 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
- 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
- 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
- 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
- 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
- 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
- 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
- 325, 0, 412, 326, 413, 414, 415, 416, 417, 418,
- 419, 420, 421, 422, 423, 424, 0, 0, 425, 426,
+ 0, 0, 0, 0, 0, 0, 0, 426, 427, 428,
+ 429, 430, 431, 432, 433, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 318, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+ 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+ 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+ 57, 58, 0, 0, 61, 62, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
+ 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
+ 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
+ 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
+ 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
+ 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
+ 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
+ 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
+ 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
+ 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
+ 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
+ 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
+ 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
+ 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
+ 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
+ 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
+ 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
+ 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
+ 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
+ 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
+ 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
+ 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
+ 287, 288, 289, 290, 0, 0, 414, 415, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 427, 0, 0,
- 0, 516, 0, 0, 0, 0, 0, 0, 428, 429,
- 430, 431, 3, 4, 5, 6, 7, 0, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 0,
+ 0, 0, 0, 0, 0, 416, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 417, 418, 419, 420,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 0, 0, 0, 0, 0, 0, 0, 421, 422, 423,
+ 424, 425, 297, 0, 0, 0, 0, 302, 303, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 426, 427, 428,
+ 429, 430, 431, 432, 433, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 318, 2, 3, 4, 5, 6,
+ 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
+ 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
+ 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
+ 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+ 57, 58, 0, 0, 61, 62, 63, 64, 65, 66,
+ 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
+ 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
+ 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
+ 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
+ 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
+ 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
+ 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
+ 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
+ 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
+ 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
+ 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
+ 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
+ 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
+ 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
+ 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
+ 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
+ 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
+ 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
+ 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
+ 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
+ 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
+ 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
+ 287, 288, 289, 290, 0, 0, 414, 415, 0, 444,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 462, 0, 416, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 417, 418, 419, 420,
+ 528, 529, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 421, 422, 423,
+ 424, 425, 297, 0, 0, 0, 0, 302, 538, 0,
+ 0, 541, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 462, 0, 0, 0, 0, 426, 427, 428,
+ 429, 430, 431, 432, 433, 0, 0, 0, 0, 0,
+ 0, 462, 0, 0, 318, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 625, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 631, 632, 633, 462, 462, 462, 462, 462, 462,
+ 462, 462, 462, 462, 462, 462, 462, 462, 462, 462,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 462
+};
+
+static const yytype_int16 yycheck[] =
+{
+ 0, 321, 0, 455, 0, 358, 315, 314, 361, 0,
+ 363, 364, 321, 333, 367, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+ 59, 60, 524, 314, 63, 64, 65, 66, 67, 68,
+ 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
+ 79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
+ 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
+ 99, 100, 101, 102, 103, 104, 105, 106, 107, 108,
+ 109, 110, 111, 112, 113, 114, 115, 116, 117, 118,
+ 119, 120, 121, 122, 123, 124, 125, 126, 127, 128,
+ 129, 130, 131, 132, 133, 134, 135, 136, 137, 138,
+ 139, 140, 141, 142, 143, 144, 145, 146, 147, 148,
+ 149, 150, 151, 152, 153, 154, 155, 156, 157, 158,
+ 159, 160, 161, 162, 163, 164, 165, 166, 167, 168,
+ 169, 170, 171, 172, 173, 174, 175, 176, 177, 178,
+ 179, 180, 181, 182, 183, 184, 185, 186, 187, 188,
+ 189, 190, 191, 192, 193, 194, 195, 196, 197, 198,
+ 199, 200, 201, 202, 203, 204, 205, 206, 207, 208,
+ 209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
+ 219, 220, 221, 222, 223, 224, 225, 226, 227, 228,
+ 229, 230, 231, 232, 233, 234, 235, 236, 237, 238,
+ 239, 240, 241, 242, 243, 244, 245, 246, 247, 248,
+ 249, 250, 251, 252, 253, 254, 255, 256, 257, 258,
+ 259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
+ 269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
+ 279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
+ 289, 290, 291, 292, 341, 349, 416, 378, 506, 518,
+ 408, 520, 397, 384, 523, 580, 453, 656, 297, 298,
+ 302, 324, 350, 316, 318, 293, 294, 314, 350, 316,
+ 374, 324, 316, 370, 405, 406, 323, 299, 300, 316,
+ 324, 341, 315, 371, 442, 684, 323, 350, 321, 349,
+ 341, 315, 389, 332, 333, 337, 350, 357, 349, 357,
+ 360, 357, 351, 295, 296, 316, 357, 356, 357, 695,
+ 370, 456, 323, 317, 374, 315, 702, 321, 488, 370,
+ 490, 321, 314, 374, 316, 316, 584, 713, 320, 389,
+ 475, 318, 323, 315, 321, 315, 315, 324, 389, 321,
+ 400, 321, 321, 321, 315, 614, 324, 451, 321, 400,
+ 321, 324, 321, 550, 321, 324, 453, 324, 455, 324,
+ 518, 321, 520, 533, 324, 523, 304, 305, 306, 307,
+ 308, 309, 310, 311, 312, 313, 339, 340, 341, 537,
+ 562, 563, 564, 565, 321, 323, 321, 324, 329, 330,
+ 331, 451, 717, 453, 326, 455, 328, 655, 667, 319,
+ 451, 321, 453, 332, 455, 321, 322, 321, 322, 558,
+ 559, 318, 560, 561, 545, 318, 586, 350, 316, 350,
+ 590, 618, 566, 567, 350, 324, 323, 524, 350, 315,
+ 314, 316, 350, 350, 336, 335, 301, 303, 707, 334,
+ 317, 316, 314, 319, 324, 324, 506, 605, 606, 322,
+ 314, 314, 314, 550, 324, 506, 614, 324, 314, 314,
+ 350, 350, 315, 317, 524, 723, 317, 350, 314, 321,
+ 358, 319, 317, 524, 315, 314, 350, 315, 318, 323,
+ 650, 324, 315, 318, 362, 319, 656, 318, 324, 568,
+ 550, 569, 572, 570, 673, 571, 458, 296, 573, 550,
+ 628, 370, 658, 374, 544, 701, 374, 713, 368, 667,
+ 714, 658, 684, 516, 684, 516, 675, 516, -1, -1,
+ 580, 618, -1, -1, 584, 357, 696, -1, 673, 580,
+ -1, 689, -1, 584, -1, -1, -1, -1, -1, -1,
+ -1, -1, 712, -1, -1, -1, -1, -1, -1, 707,
+ -1, -1, -1, -1, -1, -1, -1, -1, 618, 656,
+ -1, -1, -1, -1, -1, -1, -1, 618, -1, -1,
+ -1, -1, -1, -1, -1, 679, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 684, -1, -1,
+ -1, -1, -1, -1, -1, 655, 656, -1, 658, -1,
+ -1, -1, -1, -1, 655, 656, -1, 658, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 679,
+ -1, -1, -1, -1, 684, -1, -1, -1, 679, -1,
+ -1, -1, -1, 684, -1, 695, -1, -1, -1, -1,
+ -1, -1, 702, -1, 695, -1, -1, -1, -1, -1,
+ -1, 702, -1, 713, -1, -1, -1, 717, -1, -1,
+ -1, -1, 713, 723, -1, -1, 717, -1, -1, -1,
+ 0, -1, 723, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 69,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 85, 86, 87, 88, 89,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 118, 119,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
@@ -2167,66 +2440,193 @@ static const yytype_int16 yytable[] =
260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 325, 0, 412, 326, 413,
- 414, 415, 416, 417, 418, 419, 420, 421, 422, 423,
- 424, 0, 0, 425, 426, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 427, 0, 0, 572, 0, 0, 0, 0,
- 0, 0, 0, 428, 429, 430, 431, 3, 4, 5,
- 6, 7, 0, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 21, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
- 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
- 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
- 55, 56, 57, 58, 59, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 69, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
- 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
- 105, 106, 107, 108, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 118, 119, 120, 121, 122, 123, 124,
- 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
- 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
- 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
- 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
- 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
- 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
- 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
- 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
- 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
- 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
- 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
- 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
- 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
- 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
- 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
- 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
- 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
- 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
- 325, 0, 412, 326, 413, 414, 415, 416, 417, 418,
- 419, 420, 421, 422, 423, 424, 0, 0, 425, 426,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 427, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 580, 428, 429,
- 430, 431, 3, 4, 5, 6, 7, 0, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 290, 291, 292, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 324, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 338, 339,
+ 340, 341, 342, -1, -1, -1, -1, -1, -1, -1,
+ -1, 351, 352, 353, 354, 355, 356, 357, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 370, 371, 372, 373, 374, 375, -1, -1, -1, -1,
+ -1, -1, -1, -1, 384, -1, 386, 387, 388, 389,
+ 390, 391, 392, 393, 394, 395, 396, 397, 398, 399,
+ 400, 401, 402, 403, 404, 405, 406, 407, 408, 409,
+ 410, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
+ 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
+ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
+ 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
+ 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
+ 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
+ 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+ 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
+ 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
+ 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
+ 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
+ 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
+ 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
+ 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
+ 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
+ 292, -1, -1, 295, 296, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, 314, -1, 316, -1, 318, 319, -1, -1,
+ -1, -1, 324, 325, 326, 327, 328, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 338, 339, 340, 341,
+ 342, -1, -1, -1, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, 358, 359, 360, 361,
+ -1, 363, 364, 365, 366, 367, 368, 369, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 380, 381,
+ 382, 383, 384, 385, 386, 387, 388, 389, 390, 391,
+ 392, 393, 394, 395, 396, 397, 398, 399, 400, 401,
+ 402, 403, 404, 405, 406, 407, 408, 409, 410, 3,
+ 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
+ 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+ 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
+ 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
+ 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
+ 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
+ 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
+ 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
+ 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
+ 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
+ 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
+ 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
+ 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
+ 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
+ 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
+ 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
+ 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
+ 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
+ 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
+ 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
+ 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
+ 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
+ 284, 285, 286, 287, 288, 289, 290, 291, 292, -1,
+ -1, 295, 296, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 314, -1, 316, -1, 318, 319, -1, -1, -1, -1,
+ 324, 325, 326, 327, 328, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 338, 339, 340, 341, 342, -1,
+ -1, -1, 346, 347, 348, 349, 350, 351, 352, 353,
+ 354, 355, 356, 357, 358, 359, 360, 361, -1, 363,
+ 364, 365, 366, 367, 368, 369, 370, 371, 372, 373,
+ 374, 375, 376, 377, 378, 379, 380, 381, 382, 383,
+ 384, 385, 386, 387, 388, 389, 390, 391, 392, 393,
+ 394, 395, 396, 397, 398, 399, 400, 401, 402, 403,
+ 404, 405, 406, 407, 408, 409, 410, 3, 4, 5,
+ 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
+ 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
+ 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
+ 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
+ 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
+ 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
+ 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
+ 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
+ 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
+ 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
+ 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
+ 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
+ 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
+ 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
+ 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
+ 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
+ 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
+ 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
+ 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
+ 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
+ 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
+ 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
+ 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
+ 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
+ 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
+ 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
+ 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
+ 286, 287, 288, 289, 290, 291, 292, -1, -1, 295,
+ 296, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 314, -1,
+ 316, -1, 318, -1, -1, -1, -1, -1, 324, 325,
+ 326, 327, 328, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, 338, 339, 340, 341, 342, -1, -1, -1,
+ 346, 347, 348, 349, 350, 351, 352, 353, 354, 355,
+ 356, 357, 358, 359, 360, 361, -1, 363, 364, 365,
+ 366, 367, 368, 369, 370, 371, 372, 373, 374, 375,
+ 376, 377, 378, 379, 380, 381, 382, 383, 384, 385,
+ 386, 387, 388, 389, 390, 391, 392, 393, 394, 395,
+ 396, 397, 398, 399, 400, 401, 402, 403, 404, 405,
+ 406, 407, 408, 409, 410, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 291, 292, -1, -1, 295, 296, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 314, -1, 316, -1,
+ 318, -1, -1, -1, -1, -1, 324, 325, 326, 327,
+ 328, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 338, 339, 340, 341, 342, -1, -1, -1, 346, 347,
+ 348, 349, 350, 351, 352, 353, 354, 355, 356, 357,
+ 358, 359, 360, 361, -1, 363, 364, 365, 366, 367,
+ 368, 369, 370, 371, 372, 373, 374, 375, 376, 377,
+ 378, 379, 380, 381, 382, 383, 384, 385, 386, 387,
+ 388, 389, 390, 391, 392, 393, 394, 395, 396, 397,
+ 398, 399, 400, 401, 402, 403, 404, 405, 406, 407,
+ 408, 409, 410, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 69,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 85, 86, 87, 88, 89,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 118, 119,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
@@ -2244,66 +2644,193 @@ static const yytype_int16 yytable[] =
260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 325, 0, 412, 326, 413,
- 414, 415, 416, 417, 418, 419, 420, 421, 422, 423,
- 424, 0, 0, 425, 426, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 427, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 428, 429, 430, 431, 3, 4, 5,
- 6, 7, 0, 9, 10, 11, 12, 13, 14, 15,
- 16, 17, 18, 19, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 21, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
- 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
- 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
- 55, 56, 57, 58, 59, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 69, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
- 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
- 105, 106, 107, 108, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 118, 119, 120, 121, 122, 123, 124,
- 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
- 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
- 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
- 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
- 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
- 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
- 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
- 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
- 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
- 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
- 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
- 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
- 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
- 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
- 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
- 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
- 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
- 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
- 534, 0, 412, 326, 413, 414, 415, 416, 417, 418,
- 419, 420, 421, 422, 423, 424, 0, 0, 425, 426,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 427, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 428, 429,
- 430, 431, 3, 4, 5, 6, 7, 0, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 290, 291, 292, -1, -1, 295, 296, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 314, -1, 316, -1, -1, -1,
+ -1, -1, -1, -1, 324, 325, 326, 327, 328, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 338, 339,
+ 340, 341, 342, -1, -1, -1, 346, 347, 348, 349,
+ 350, 351, 352, 353, 354, 355, 356, 357, 358, 359,
+ 360, 361, -1, 363, 364, 365, 366, 367, 368, 369,
+ 370, 371, 372, 373, 374, 375, 376, 377, 378, 379,
+ 380, 381, 382, 383, 384, 385, 386, 387, 388, 389,
+ 390, 391, 392, 393, 394, 395, 396, 397, 398, 399,
+ 400, 401, 402, 403, 404, 405, 406, 407, 408, 409,
+ 410, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
+ 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
+ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
+ 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
+ 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
+ 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
+ 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+ 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
+ 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
+ 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
+ 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
+ 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
+ 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
+ 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
+ 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
+ 292, -1, -1, 295, 296, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, 314, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, 324, 325, 326, 327, 328, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 338, 339, 340, 341,
+ 342, -1, -1, -1, 346, 347, 348, 349, 350, 351,
+ 352, 353, 354, 355, 356, 357, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 370, 371,
+ 372, 373, 374, 375, 376, 377, 378, 379, 380, 381,
+ 382, 383, 384, -1, 386, 387, 388, 389, 390, 391,
+ 392, 393, 394, 395, 396, 397, 398, 399, 400, 401,
+ 402, 403, 404, 405, 406, 407, 408, 409, 410, 3,
+ 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
+ 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+ 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
+ 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
+ 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
+ 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
+ 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
+ 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
+ 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
+ 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
+ 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
+ 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
+ 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
+ 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
+ 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
+ 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
+ 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
+ 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
+ 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
+ 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
+ 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
+ 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
+ 284, 285, 286, 287, 288, 289, 290, 291, 292, -1,
+ -1, 295, 296, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 314, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 325, 326, 327, 328, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 338, 339, 340, 341, -1, -1,
+ -1, -1, 346, 347, 348, 349, 350, 351, 352, 353,
+ 354, 355, 356, 357, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 370, 371, 372, 373,
+ 374, 375, 376, 377, 378, 379, 380, 381, 382, 383,
+ 384, -1, 386, 387, 388, 389, 390, 391, 392, 393,
+ 394, 395, 396, 397, 398, 399, 400, 401, 402, 403,
+ 404, 405, 406, 407, 408, 409, 410, 3, 4, 5,
+ 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
+ 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
+ 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
+ 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
+ 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
+ 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
+ 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
+ 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
+ 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
+ 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
+ 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
+ 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
+ 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
+ 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
+ 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
+ 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
+ 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
+ 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
+ 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
+ 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
+ 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
+ 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
+ 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
+ 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
+ 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
+ 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
+ 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
+ 286, 287, 288, 289, 290, 291, 292, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 324, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, 338, 339, 340, 341, 342, -1, -1, -1,
+ -1, -1, -1, -1, -1, 351, 352, 353, 354, 355,
+ 356, 357, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 370, 371, 372, 373, 374, 375,
+ -1, -1, -1, -1, -1, -1, -1, -1, 384, -1,
+ 386, 387, 388, 389, 390, 391, 392, 393, 394, 395,
+ 396, 397, 398, 399, 400, 401, 402, 403, 404, 405,
+ 406, 407, 408, 409, 410, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
+ 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,
+ 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
+ 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+ 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
+ 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
+ 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
+ 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
+ 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
+ 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
+ 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
+ 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
+ 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+ 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
+ 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
+ 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
+ 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
+ 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
+ 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
+ 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
+ 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
+ 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
+ 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
+ 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
+ 288, 289, 290, 291, 292, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 324, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 338, 339, 340, 341, -1, -1, -1, -1, -1, -1,
+ -1, -1, 350, 351, 352, 353, 354, 355, 356, 357,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, 370, 371, 372, 373, 374, 375, -1, -1,
+ -1, -1, -1, -1, -1, -1, 384, -1, 386, 387,
+ 388, 389, 390, 391, 392, 393, 394, 395, 396, 397,
+ 398, 399, 400, 401, 402, 403, 404, 405, 406, 407,
+ 408, 409, 410, 3, 4, 5, 6, 7, 8, 9,
+ 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
+ 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 85, 86, 87, 88, 89,
+ 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
+ 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
+ 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 118, 119,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+ 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
@@ -2321,93 +2848,147 @@ static const yytype_int16 yytable[] =
260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 325, 0, 0, 326
-};
-
-static const yytype_int16 yycheck[] =
-{
- 0, 0, 0, 339, 347, 24, 0, 26, 27, 395,
- 81, 30, 427, 514, 502, 516, 406, 381, 519, 456,
- 588, 340, 453, 358, 359, 653, 340, 382, 377, 372,
- 363, 395, 368, 691, 385, 376, 385, 695, 375, 394,
- 377, 382, 376, 671, 375, 703, 376, 384, 382, 377,
- 440, 375, 382, 356, 357, 377, 384, 393, 393, 394,
- 379, 425, 426, 385, 450, 398, 481, 376, 483, 403,
- 404, 385, 375, 377, 377, 377, 382, 376, 381, 443,
- 384, 378, 384, 382, 376, 382, 450, 473, 576, 520,
- 382, 365, 366, 367, 368, 369, 370, 371, 372, 373,
- 374, 558, 559, 560, 561, 354, 355, 544, 609, 473,
- 384, 454, 379, 376, 529, 382, 376, 453, 385, 382,
- 456, 376, 382, 340, 514, 382, 516, 382, 385, 519,
- 382, 382, 382, 385, 385, 385, 340, 382, 382, 707,
- 385, 385, 380, 533, 382, 401, 402, 403, 390, 391,
- 392, 387, 393, 389, 360, 361, 382, 383, 382, 383,
- 379, 662, 340, 578, 379, 554, 555, 582, 340, 657,
- 377, 556, 557, 384, 538, 385, 613, 340, 562, 563,
- 376, 375, 377, 340, 520, 340, 395, 551, 552, 553,
- 554, 555, 556, 557, 558, 559, 560, 561, 562, 563,
- 564, 565, 566, 567, 568, 569, 397, 708, 544, 396,
- 600, 601, 546, 362, 364, 385, 378, 385, 385, 609,
- 380, 375, 375, 375, 383, 340, 377, 375, 375, 717,
- 645, 375, 340, 376, 340, 339, 378, 378, 653, 375,
- 375, 379, 382, 340, 376, 378, 380, 376, 385, 25,
- 376, 379, 379, 384, 564, 385, 671, 380, 565, 567,
- 678, 566, 372, 368, 452, 372, 333, 568, 545, 340,
- 685, 569, 662, 648, 624, 703, 690, 613, 704, 366,
- 648, 512, 512, 671, 512, 355, 680, 702, -1, -1,
- -1, -1, 678, -1, 684, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 678, -1, -1, 653, 708, -1,
- 339, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 676, -1, 671, -1, -1, -1, 339,
- -1, -1, -1, -1, -1, 339, -1, 347, -1, -1,
- -1, -1, -1, 347, -1, 355, 355, 355, -1, -1,
- -1, 355, 362, -1, -1, -1, -1, -1, 368, -1,
- -1, -1, 372, -1, 368, -1, -1, -1, 372, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 393, -1, -1, -1, -1, 398, 393,
- -1, -1, -1, -1, 398, -1, -1, -1, -1, -1,
+ 290, 291, 292, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 319,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 338, 339,
+ 340, 341, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, 351, 352, 353, 354, 355, 356, 357, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 370, 371, 372, 373, 374, 375, -1, -1, -1, -1,
+ -1, -1, -1, -1, 384, -1, 386, 387, 388, 389,
+ 390, 391, 392, 393, 394, 395, 396, 397, 398, 399,
+ 400, 401, 402, 403, 404, 405, 406, 407, 408, 409,
+ 410, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
+ 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
+ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
+ 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
+ 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
+ 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
+ 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+ 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
+ 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
+ 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
+ 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
+ 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
+ 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
+ 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
+ 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
+ 292, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 453, 454, -1, 456, -1, -1, 453,
- 454, -1, 456, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 319, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 338, 339, 340, 341,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 351,
+ 352, 353, 354, 355, 356, 357, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 370, 371,
+ 372, 373, 374, 375, -1, -1, -1, -1, -1, -1,
+ -1, -1, 384, -1, 386, 387, 388, 389, 390, 391,
+ 392, 393, 394, 395, 396, 397, 398, 399, 400, 401,
+ 402, 403, 404, 405, 406, 407, 408, 409, 410, 3,
+ 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+ 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
+ 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
+ 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
+ 84, 85, 86, 87, 88, 89, 90, 91, 92, 93,
+ 94, 95, 96, 97, 98, 99, 100, 101, 102, 103,
+ 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
+ 114, 115, 116, 117, 118, 119, 120, 121, 122, 123,
+ 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
+ 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153,
+ 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
+ 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
+ 174, 175, 176, 177, 178, 179, 180, 181, 182, 183,
+ 184, 185, 186, 187, 188, 189, 190, 191, 192, 193,
+ 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
+ 204, 205, 206, 207, 208, 209, 210, 211, 212, 213,
+ 214, 215, 216, 217, 218, 219, 220, 221, 222, 223,
+ 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
+ 234, 235, 236, 237, 238, 239, 240, 241, 242, 243,
+ 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
+ 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
+ 264, 265, 266, 267, 268, 269, 270, 271, 272, 273,
+ 274, 275, 276, 277, 278, 279, 280, 281, 282, 283,
+ 284, 285, 286, 287, 288, 289, 290, 291, 292, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 502, -1, -1, -1, -1, -1, 502, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 520, -1, -1, -1, -1, -1, 520, -1, -1, -1,
+ -1, -1, -1, -1, -1, 319, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 544, -1, -1, -1, -1, -1,
- 544, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 338, 339, 340, 341, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 351, 352, 353,
+ 354, 355, 356, 357, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 370, 371, 372, 373,
+ 374, 375, -1, -1, -1, -1, -1, -1, -1, -1,
+ 384, -1, 386, 387, 388, 389, 390, 391, 392, 393,
+ 394, 395, 396, 397, 398, 399, 400, 401, 402, 403,
+ 404, 405, 406, 407, 408, 409, 410, 3, 4, 5,
+ 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
+ 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
+ 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
+ 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
+ 56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
+ 66, 67, 68, 69, 70, 71, 72, 73, 74, 75,
+ 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
+ 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
+ 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
+ 106, 107, 108, 109, 110, 111, 112, 113, 114, 115,
+ 116, 117, 118, 119, 120, 121, 122, 123, 124, 125,
+ 126, 127, 128, 129, 130, 131, 132, 133, 134, 135,
+ 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,
+ 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
+ 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
+ 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
+ 176, 177, 178, 179, 180, 181, 182, 183, 184, 185,
+ 186, 187, 188, 189, 190, 191, 192, 193, 194, 195,
+ 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
+ 206, 207, 208, 209, 210, 211, 212, 213, 214, 215,
+ 216, 217, 218, 219, 220, 221, 222, 223, 224, 225,
+ 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
+ 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
+ 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
+ 256, 257, 258, 259, 260, 261, 262, 263, 264, 265,
+ 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
+ 276, 277, 278, 279, 280, 281, 282, 283, 284, 285,
+ 286, 287, 288, 289, 290, 291, 292, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 576, -1, -1, -1,
- -1, -1, 576, -1, -1, -1, -1, -1, 588, -1,
- -1, -1, -1, -1, 588, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 613, -1, -1, -1, -1, -1, 613,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 648, -1,
- -1, -1, -1, 653, 648, -1, -1, 657, -1, 653,
- -1, -1, -1, 657, -1, -1, -1, -1, -1, -1,
- -1, 671, -1, -1, -1, -1, 676, 671, -1, -1,
- -1, -1, 676, -1, -1, -1, -1, -1, -1, -1,
- -1, 691, -1, -1, -1, 695, -1, 691, -1, -1,
- -1, 695, -1, 703, -1, -1, -1, 707, -1, 703,
- -1, -1, -1, 707, -1, -1, -1, 717, -1, -1,
- -1, 0, -1, 717, 3, 4, 5, 6, 7, 8,
+ -1, -1, 338, 339, 340, 341, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 351, 352, 353, 354, 355,
+ 356, 357, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 370, 371, 372, 373, 374, 375,
+ -1, -1, -1, -1, -1, -1, -1, -1, 384, -1,
+ 386, 387, 388, 389, 390, 391, 392, 393, 394, 395,
+ 396, 397, 398, 399, 400, 401, 402, 403, 404, 405,
+ 406, 407, 408, 409, 410, 4, 5, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
- 19, 20, 21, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 33, 34, 35, 36, 37, 38,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
- 59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
+ 59, 60, -1, -1, 63, 64, 65, 66, 67, 68,
69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
@@ -2430,185 +3011,23 @@ static const yytype_int16 yycheck[] =
259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
- 289, 290, 291, 292, 293, 294, 295, 296, 297, 298,
- 299, 300, 301, 302, 303, 304, 305, 306, 307, 308,
- 309, 310, 311, 312, 313, 314, 315, 316, 317, 318,
- 319, 320, 321, 322, 323, 324, 325, 326, 327, 328,
- 329, 330, 331, 332, 333, 334, 335, 336, 337, 338,
- -1, -1, 341, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 385, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 399, 400, 401, 402, 403, 404, 3, 4, 5, 6,
- 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
- 17, 18, 19, 20, 21, 22, 23, 24, -1, 26,
- 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
- 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
- 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
- 57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
- 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
- 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
- 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
- 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
- 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
- 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
- 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
- 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
- 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
- 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
- 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
- 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
- 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
- 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
- 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
- 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
- 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
- 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
- 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
- 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
- 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
- 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
- 287, 288, 289, 290, 291, 292, 293, 294, 295, 296,
- 297, 298, 299, 300, 301, 302, 303, 304, 305, 306,
- 307, 308, 309, 310, 311, 312, 313, 314, 315, 316,
- 317, 318, 319, 320, 321, 322, 323, 324, 325, 326,
- 327, 328, 329, 330, 331, 332, 333, 334, 335, 336,
- 337, 338, 339, 340, 341, 342, 343, 344, 345, 346,
- 347, 348, 349, 350, 351, 352, 353, -1, -1, 356,
- 357, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 375, -1,
- 377, -1, 379, 380, -1, -1, -1, -1, 385, 386,
- 387, 388, 389, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 399, 400, 401, 402, 403, 404, 3, 4,
- 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
- 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
- -1, 26, 27, 28, 29, 30, 31, 32, 33, 34,
- 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
- 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
- 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
- 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
- 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
- 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
- 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
- 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
- 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
- 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
- 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
- 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
- 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
- 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
- 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
- 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
- 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
- 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
- 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
- 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
- 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
- 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
- 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
- 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
- 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
- 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
- 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
- 325, 326, 327, 328, 329, 330, 331, 332, 333, 334,
- 335, 336, 337, 338, 339, 340, 341, 342, 343, 344,
- 345, 346, 347, 348, 349, 350, 351, 352, 353, -1,
- -1, 356, 357, -1, -1, -1, -1, -1, -1, -1,
+ 289, 290, 291, 292, -1, -1, 295, 296, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 375, -1, 377, -1, 379, 380, -1, -1, -1, -1,
- 385, 386, 387, 388, 389, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 399, 400, 401, 402, 403, 404,
- 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
- 23, 24, -1, 26, 27, 28, 29, 30, 31, 32,
- 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
- 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
- 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
- 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
- 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
- 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
- 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
- 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
- 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
- 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
- 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
- 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
- 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
- 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
- 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
- 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
- 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
- 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
- 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
- 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
- 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
- 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
- 333, 334, 335, 336, 337, 338, 339, 340, 341, 342,
- 343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
- 353, -1, -1, 356, 357, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 314, -1, -1, -1, 318,
+ 319, -1, -1, -1, -1, -1, 325, 326, 327, 328,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 375, -1, 377, -1, 379, -1, -1, -1,
- -1, -1, 385, 386, 387, 388, 389, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 399, 400, 401, 402,
- 403, 404, 3, 4, 5, 6, 7, 8, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
- 21, 22, 23, 24, -1, 26, 27, 28, 29, 30,
- 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
- 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
- 51, 52, 53, 54, 55, 56, 57, 58, 59, 60,
- 61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
- 71, 72, 73, 74, 75, 76, 77, 78, 79, 80,
- 81, 82, 83, 84, 85, 86, 87, 88, 89, 90,
- 91, 92, 93, 94, 95, 96, 97, 98, 99, 100,
- 101, 102, 103, 104, 105, 106, 107, 108, 109, 110,
- 111, 112, 113, 114, 115, 116, 117, 118, 119, 120,
- 121, 122, 123, 124, 125, 126, 127, 128, 129, 130,
- 131, 132, 133, 134, 135, 136, 137, 138, 139, 140,
- 141, 142, 143, 144, 145, 146, 147, 148, 149, 150,
- 151, 152, 153, 154, 155, 156, 157, 158, 159, 160,
- 161, 162, 163, 164, 165, 166, 167, 168, 169, 170,
- 171, 172, 173, 174, 175, 176, 177, 178, 179, 180,
- 181, 182, 183, 184, 185, 186, 187, 188, 189, 190,
- 191, 192, 193, 194, 195, 196, 197, 198, 199, 200,
- 201, 202, 203, 204, 205, 206, 207, 208, 209, 210,
- 211, 212, 213, 214, 215, 216, 217, 218, 219, 220,
- 221, 222, 223, 224, 225, 226, 227, 228, 229, 230,
- 231, 232, 233, 234, 235, 236, 237, 238, 239, 240,
- 241, 242, 243, 244, 245, 246, 247, 248, 249, 250,
- 251, 252, 253, 254, 255, 256, 257, 258, 259, 260,
- 261, 262, 263, 264, 265, 266, 267, 268, 269, 270,
- 271, 272, 273, 274, 275, 276, 277, 278, 279, 280,
- 281, 282, 283, 284, 285, 286, 287, 288, 289, 290,
- 291, 292, 293, 294, 295, 296, 297, 298, 299, 300,
- 301, 302, 303, 304, 305, 306, 307, 308, 309, 310,
- 311, 312, 313, 314, 315, 316, 317, 318, 319, 320,
- 321, 322, 323, 324, 325, 326, 327, 328, 329, 330,
- 331, 332, 333, 334, 335, 336, 337, 338, 339, 340,
- 341, 342, 343, 344, 345, 346, 347, 348, 349, 350,
- 351, 352, 353, -1, -1, 356, 357, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 346, 347, 348,
+ 349, 350, 351, -1, -1, -1, -1, 356, 357, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 375, -1, 377, -1, 379, -1,
- -1, -1, -1, -1, 385, 386, 387, 388, 389, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 399, 400,
- 401, 402, 403, 404, 3, 4, 5, 6, 7, 8,
+ -1, -1, -1, -1, -1, -1, -1, 376, 377, 378,
+ 379, 380, 381, 382, 383, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 393, 4, 5, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
- 19, 20, 21, 22, 23, 24, -1, 26, 27, 28,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
- 59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
+ 59, 60, -1, -1, 63, 64, 65, 66, 67, 68,
69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
@@ -2631,265 +3050,62 @@ static const yytype_int16 yycheck[] =
259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
- 289, 290, 291, 292, 293, 294, 295, 296, 297, 298,
- 299, 300, 301, 302, 303, 304, 305, 306, 307, 308,
- 309, 310, 311, 312, 313, 314, 315, 316, 317, 318,
- 319, 320, 321, 322, 323, 324, 325, 326, 327, 328,
- 329, 330, 331, 332, 333, 334, 335, 336, 337, 338,
- 339, 340, 341, 342, 343, 344, 345, 346, 347, 348,
- 349, 350, 351, 352, 353, -1, -1, 356, 357, -1,
+ 289, 290, 291, 292, -1, -1, 295, 296, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 375, -1, 377, -1,
- -1, -1, -1, -1, -1, -1, 385, 386, 387, 388,
- 389, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 399, 400, 401, 402, 403, 404, 3, 4, 5, 6,
- 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
- 17, 18, 19, 20, 21, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 33, 34, 35, 36,
- 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,
- 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
- 57, 58, 59, 60, 61, 62, 63, 64, 65, 66,
- 67, 68, 69, 70, 71, 72, 73, 74, 75, 76,
- 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,
- 87, 88, 89, 90, 91, 92, 93, 94, 95, 96,
- 97, 98, 99, 100, 101, 102, 103, 104, 105, 106,
- 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
- 117, 118, 119, 120, 121, 122, 123, 124, 125, 126,
- 127, 128, 129, 130, 131, 132, 133, 134, 135, 136,
- 137, 138, 139, 140, 141, 142, 143, 144, 145, 146,
- 147, 148, 149, 150, 151, 152, 153, 154, 155, 156,
- 157, 158, 159, 160, 161, 162, 163, 164, 165, 166,
- 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
- 177, 178, 179, 180, 181, 182, 183, 184, 185, 186,
- 187, 188, 189, 190, 191, 192, 193, 194, 195, 196,
- 197, 198, 199, 200, 201, 202, 203, 204, 205, 206,
- 207, 208, 209, 210, 211, 212, 213, 214, 215, 216,
- 217, 218, 219, 220, 221, 222, 223, 224, 225, 226,
- 227, 228, 229, 230, 231, 232, 233, 234, 235, 236,
- 237, 238, 239, 240, 241, 242, 243, 244, 245, 246,
- 247, 248, 249, 250, 251, 252, 253, 254, 255, 256,
- 257, 258, 259, 260, 261, 262, 263, 264, 265, 266,
- 267, 268, 269, 270, 271, 272, 273, 274, 275, 276,
- 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
- 287, 288, 289, 290, 291, 292, 293, 294, 295, 296,
- 297, 298, 299, 300, 301, 302, 303, 304, 305, 306,
- 307, 308, 309, 310, 311, 312, 313, 314, 315, 316,
- 317, 318, 319, 320, 321, 322, 323, 324, 325, 326,
- 327, 328, 329, 330, 331, 332, 333, 334, 335, 336,
- 337, 338, -1, 340, 341, 342, 343, 344, 345, 346,
- 347, 348, 349, 350, 351, 352, 353, -1, -1, 356,
- 357, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 375, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 385, 386,
- 387, 388, 389, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 399, 400, 401, 402, 403, 404, 3, 4,
- 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
- 15, 16, 17, 18, 19, 20, 21, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 33, 34,
- 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
- 45, 46, 47, 48, 49, 50, 51, 52, 53, 54,
- 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,
- 65, 66, 67, 68, 69, 70, 71, 72, 73, 74,
- 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
- 85, 86, 87, 88, 89, 90, 91, 92, 93, 94,
- 95, 96, 97, 98, 99, 100, 101, 102, 103, 104,
- 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,
- 115, 116, 117, 118, 119, 120, 121, 122, 123, 124,
- 125, 126, 127, 128, 129, 130, 131, 132, 133, 134,
- 135, 136, 137, 138, 139, 140, 141, 142, 143, 144,
- 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
- 155, 156, 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 171, 172, 173, 174,
- 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
- 185, 186, 187, 188, 189, 190, 191, 192, 193, 194,
- 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
- 205, 206, 207, 208, 209, 210, 211, 212, 213, 214,
- 215, 216, 217, 218, 219, 220, 221, 222, 223, 224,
- 225, 226, 227, 228, 229, 230, 231, 232, 233, 234,
- 235, 236, 237, 238, 239, 240, 241, 242, 243, 244,
- 245, 246, 247, 248, 249, 250, 251, 252, 253, 254,
- 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
- 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
- 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
- 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
- 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
- 305, 306, 307, 308, 309, 310, 311, 312, 313, 314,
- 315, 316, 317, 318, 319, 320, 321, 322, 323, 324,
- 325, 326, 327, 328, 329, 330, 331, 332, 333, 334,
- 335, 336, 337, 338, -1, -1, 341, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 385, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 399, 400, 401, 402, 403, 404,
- 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 20, 21, -1,
+ -1, -1, -1, -1, -1, 314, -1, -1, 317, -1,
+ -1, -1, -1, -1, -1, -1, 325, 326, 327, 328,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
- 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
- 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
- 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
- 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
- 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
- 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
- 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
- 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
- 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
- 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
- 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
- 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
- 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
- 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
- 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
- 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
- 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
- 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
- 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
- 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
- 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
- 333, 334, 335, 336, 337, 338, -1, 340, 341, 342,
- 343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
- 353, -1, -1, 356, 357, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 346, 347, 348,
+ 349, 350, 351, -1, -1, -1, -1, 356, 357, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 375, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 386, 387, 388, 389, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 399, 400, 401, 402,
- 403, 3, 4, 5, 6, 7, 8, 9, 10, 11,
- 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 33, 34, 35, 36, 37, 38, 39, 40, 41,
- 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
- 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
- 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
- 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
- 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
- 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
- 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
- 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
- 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
- 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
- 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
- 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
- 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
- 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
- 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
- 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
- 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
- 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 297, 298, 299, 300, 301,
- 302, 303, 304, 305, 306, 307, 308, 309, 310, 311,
- 312, 313, 314, 315, 316, 317, 318, 319, 320, 321,
- 322, 323, 324, 325, 326, 327, 328, 329, 330, 331,
- 332, 333, 334, 335, 336, 337, 338, -1, 340, 341,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 385, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 399, 400, 401,
- 402, 403, 3, 4, 5, 6, 7, 8, 9, 10,
- 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
- 21, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 33, 34, 35, 36, 37, 38, 39, 40,
- 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
- 51, 52, 53, 54, 55, 56, 57, 58, 59, 60,
- 61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
- 71, 72, 73, 74, 75, 76, 77, 78, 79, 80,
- 81, 82, 83, 84, 85, 86, 87, 88, 89, 90,
- 91, 92, 93, 94, 95, 96, 97, 98, 99, 100,
- 101, 102, 103, 104, 105, 106, 107, 108, 109, 110,
- 111, 112, 113, 114, 115, 116, 117, 118, 119, 120,
- 121, 122, 123, 124, 125, 126, 127, 128, 129, 130,
- 131, 132, 133, 134, 135, 136, 137, 138, 139, 140,
- 141, 142, 143, 144, 145, 146, 147, 148, 149, 150,
- 151, 152, 153, 154, 155, 156, 157, 158, 159, 160,
- 161, 162, 163, 164, 165, 166, 167, 168, 169, 170,
- 171, 172, 173, 174, 175, 176, 177, 178, 179, 180,
- 181, 182, 183, 184, 185, 186, 187, 188, 189, 190,
- 191, 192, 193, 194, 195, 196, 197, 198, 199, 200,
- 201, 202, 203, 204, 205, 206, 207, 208, 209, 210,
- 211, 212, 213, 214, 215, 216, 217, 218, 219, 220,
- 221, 222, 223, 224, 225, 226, 227, 228, 229, 230,
- 231, 232, 233, 234, 235, 236, 237, 238, 239, 240,
- 241, 242, 243, 244, 245, 246, 247, 248, 249, 250,
- 251, 252, 253, 254, 255, 256, 257, 258, 259, 260,
- 261, 262, 263, 264, 265, 266, 267, 268, 269, 270,
- 271, 272, 273, 274, 275, 276, 277, 278, 279, 280,
- 281, 282, 283, 284, 285, 286, 287, 288, 289, 290,
- 291, 292, 293, 294, 295, 296, 297, 298, 299, 300,
- 301, 302, 303, 304, 305, 306, 307, 308, 309, 310,
- 311, 312, 313, 314, 315, 316, 317, 318, 319, 320,
- 321, 322, 323, 324, 325, 326, 327, 328, 329, 330,
- 331, 332, 333, 334, 335, 336, 337, 338, -1, -1,
- 341, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 380,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 399, 400,
- 401, 402, 403, 3, 4, 5, 6, 7, 8, 9,
- 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
- 20, 21, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 33, 34, 35, 36, 37, 38, 39,
- 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
- 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
- 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
- 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
- 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
- 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
- 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
- 120, 121, 122, 123, 124, 125, 126, 127, 128, 129,
- 130, 131, 132, 133, 134, 135, 136, 137, 138, 139,
- 140, 141, 142, 143, 144, 145, 146, 147, 148, 149,
- 150, 151, 152, 153, 154, 155, 156, 157, 158, 159,
- 160, 161, 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 171, 172, 173, 174, 175, 176, 177, 178, 179,
- 180, 181, 182, 183, 184, 185, 186, 187, 188, 189,
- 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
- 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
- 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
- 220, 221, 222, 223, 224, 225, 226, 227, 228, 229,
- 230, 231, 232, 233, 234, 235, 236, 237, 238, 239,
- 240, 241, 242, 243, 244, 245, 246, 247, 248, 249,
- 250, 251, 252, 253, 254, 255, 256, 257, 258, 259,
- 260, 261, 262, 263, 264, 265, 266, 267, 268, 269,
- 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
- 280, 281, 282, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 293, 294, 295, 296, 297, 298, 299,
- 300, 301, 302, 303, 304, 305, 306, 307, 308, 309,
- 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
- 320, 321, 322, 323, 324, 325, 326, 327, 328, 329,
- 330, 331, 332, 333, 334, 335, 336, 337, 338, -1,
- -1, 341, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 376, 377, 378,
+ 379, 380, 381, 382, 383, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 393, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+ 59, 60, -1, -1, 63, 64, 65, 66, 67, 68,
+ 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
+ 79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
+ 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
+ 99, 100, 101, 102, 103, 104, 105, 106, 107, 108,
+ 109, 110, 111, 112, 113, 114, 115, 116, 117, 118,
+ 119, 120, 121, 122, 123, 124, 125, 126, 127, 128,
+ 129, 130, 131, 132, 133, 134, 135, 136, 137, 138,
+ 139, 140, 141, 142, 143, 144, 145, 146, 147, 148,
+ 149, 150, 151, 152, 153, 154, 155, 156, 157, 158,
+ 159, 160, 161, 162, 163, 164, 165, 166, 167, 168,
+ 169, 170, 171, 172, 173, 174, 175, 176, 177, 178,
+ 179, 180, 181, 182, 183, 184, 185, 186, 187, 188,
+ 189, 190, 191, 192, 193, 194, 195, 196, 197, 198,
+ 199, 200, 201, 202, 203, 204, 205, 206, 207, 208,
+ 209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
+ 219, 220, 221, 222, 223, 224, 225, 226, 227, 228,
+ 229, 230, 231, 232, 233, 234, 235, 236, 237, 238,
+ 239, 240, 241, 242, 243, 244, 245, 246, 247, 248,
+ 249, 250, 251, 252, 253, 254, 255, 256, 257, 258,
+ 259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
+ 269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
+ 279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
+ 289, 290, 291, 292, -1, -1, 295, 296, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 314, -1, -1, -1, 318,
+ -1, -1, -1, -1, -1, -1, 325, 326, 327, 328,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 346, 347, 348,
+ 349, 350, 351, -1, -1, -1, -1, 356, 357, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 380, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 399,
- 400, 401, 402, 403, 3, 4, 5, 6, 7, 8,
+ -1, -1, -1, -1, -1, -1, -1, 376, 377, 378,
+ 379, 380, 381, 382, 383, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 393, 4, 5, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
- 19, 20, 21, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 33, 34, 35, 36, 37, 38,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
- 59, 60, 61, 62, 63, 64, 65, 66, 67, 68,
+ 59, 60, -1, -1, 63, 64, 65, 66, 67, 68,
69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
@@ -2912,358 +3128,153 @@ static const yytype_int16 yycheck[] =
259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
- 289, 290, 291, 292, 293, 294, 295, 296, 297, 298,
- 299, 300, 301, 302, 303, 304, 305, 306, 307, 308,
- 309, 310, 311, 312, 313, 314, 315, 316, 317, 318,
- 319, 320, 321, 322, 323, 324, 325, 326, 327, 328,
- 329, 330, 331, 332, 333, 334, 335, 336, 337, 338,
- -1, -1, 341, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ 289, 290, 291, 292, -1, -1, 295, 296, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 314, -1, -1, 317, -1,
+ -1, -1, -1, -1, -1, -1, 325, 326, 327, 328,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 380, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 346, 347, 348,
+ 349, 350, 351, -1, -1, -1, -1, 356, 357, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 399, 400, 401, 402, 403, 3, 4, 5, 6, 7,
- 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
- 78, 79, 80, 81, 82, 83, 84, 85, 86, 87,
- 88, 89, 90, 91, 92, 93, 94, 95, 96, 97,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,
- 128, 129, 130, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 326, 327,
- 328, 329, 330, 331, 332, 333, 334, 335, 336, 337,
- 338, -1, -1, 341, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 376, 377, 378,
+ 379, 380, 381, 382, 383, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 393, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+ 59, 60, -1, -1, 63, 64, 65, 66, 67, 68,
+ 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
+ 79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
+ 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
+ 99, 100, 101, 102, 103, 104, 105, 106, 107, 108,
+ 109, 110, 111, 112, 113, 114, 115, 116, 117, 118,
+ 119, 120, 121, 122, 123, 124, 125, 126, 127, 128,
+ 129, 130, 131, 132, 133, 134, 135, 136, 137, 138,
+ 139, 140, 141, 142, 143, 144, 145, 146, 147, 148,
+ 149, 150, 151, 152, 153, 154, 155, 156, 157, 158,
+ 159, 160, 161, 162, 163, 164, 165, 166, 167, 168,
+ 169, 170, 171, 172, 173, 174, 175, 176, 177, 178,
+ 179, 180, 181, 182, 183, 184, 185, 186, 187, 188,
+ 189, 190, 191, 192, 193, 194, 195, 196, 197, 198,
+ 199, 200, 201, 202, 203, 204, 205, 206, 207, 208,
+ 209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
+ 219, 220, 221, 222, 223, 224, 225, 226, 227, 228,
+ 229, 230, 231, 232, 233, 234, 235, 236, 237, 238,
+ 239, 240, 241, 242, 243, 244, 245, 246, 247, 248,
+ 249, 250, 251, 252, 253, 254, 255, 256, 257, 258,
+ 259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
+ 269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
+ 279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
+ 289, 290, 291, 292, -1, -1, 295, 296, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 314, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, 324, 325, 326, 327, 328,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 5, 6, 7,
- 8, 9, -1, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, -1, -1, -1, -1, -1, -1,
- -1, 399, 400, 401, 402, 403, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 82, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 346, 347, 348,
+ 349, 350, 351, -1, -1, -1, -1, 356, 357, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 326, 327,
- 328, 329, 330, 331, 332, 333, 334, 335, 336, 337,
- 338, -1, 340, 341, 342, 343, 344, 345, 346, 347,
- 348, 349, 350, 351, 352, 353, -1, -1, 356, 357,
+ -1, -1, -1, -1, -1, -1, -1, 376, 377, 378,
+ 379, 380, 381, 382, 383, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 393, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+ 59, 60, -1, -1, 63, 64, 65, 66, 67, 68,
+ 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
+ 79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
+ 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
+ 99, 100, 101, 102, 103, 104, 105, 106, 107, 108,
+ 109, 110, 111, 112, 113, 114, 115, 116, 117, 118,
+ 119, 120, 121, 122, 123, 124, 125, 126, 127, 128,
+ 129, 130, 131, 132, 133, 134, 135, 136, 137, 138,
+ 139, 140, 141, 142, 143, 144, 145, 146, 147, 148,
+ 149, 150, 151, 152, 153, 154, 155, 156, 157, 158,
+ 159, 160, 161, 162, 163, 164, 165, 166, 167, 168,
+ 169, 170, 171, 172, 173, 174, 175, 176, 177, 178,
+ 179, 180, 181, 182, 183, 184, 185, 186, 187, 188,
+ 189, 190, 191, 192, 193, 194, 195, 196, 197, 198,
+ 199, 200, 201, 202, 203, 204, 205, 206, 207, 208,
+ 209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
+ 219, 220, 221, 222, 223, 224, 225, 226, 227, 228,
+ 229, 230, 231, 232, 233, 234, 235, 236, 237, 238,
+ 239, 240, 241, 242, 243, 244, 245, 246, 247, 248,
+ 249, 250, 251, 252, 253, 254, 255, 256, 257, 258,
+ 259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
+ 269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
+ 279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
+ 289, 290, 291, 292, -1, -1, 295, 296, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 375, -1, -1,
- -1, 379, 380, -1, -1, -1, -1, -1, 386, 387,
- 388, 389, 5, 6, 7, 8, 9, -1, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 20, 21, -1,
+ -1, -1, -1, -1, -1, 314, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 325, 326, 327, 328,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 34, 35, 36, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
- 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 82,
+ -1, -1, -1, -1, -1, -1, -1, 346, 347, 348,
+ 349, 350, 351, -1, -1, -1, -1, 356, 357, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 98, 99, 100, 101, 102,
- 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
- 113, 114, 115, 116, 117, 118, 119, 120, 121, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
- 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
- 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
- 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
- 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
- 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
- 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
- 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
- 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
- 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
- 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
- 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
- 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
- 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
- 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
- 333, 334, 335, 336, 337, 338, -1, 340, 341, 342,
- 343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
- 353, -1, -1, 356, 357, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 376, 377, 378,
+ 379, 380, 381, 382, 383, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, 393, 4, 5, 6, 7, 8,
+ 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
+ 19, 20, 21, 22, 23, 24, 25, 26, 27, 28,
+ 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,
+ 39, 40, 41, 42, 43, 44, 45, 46, 47, 48,
+ 49, 50, 51, 52, 53, 54, 55, 56, 57, 58,
+ 59, 60, -1, -1, 63, 64, 65, 66, 67, 68,
+ 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
+ 79, 80, 81, 82, 83, 84, 85, 86, 87, 88,
+ 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
+ 99, 100, 101, 102, 103, 104, 105, 106, 107, 108,
+ 109, 110, 111, 112, 113, 114, 115, 116, 117, 118,
+ 119, 120, 121, 122, 123, 124, 125, 126, 127, 128,
+ 129, 130, 131, 132, 133, 134, 135, 136, 137, 138,
+ 139, 140, 141, 142, 143, 144, 145, 146, 147, 148,
+ 149, 150, 151, 152, 153, 154, 155, 156, 157, 158,
+ 159, 160, 161, 162, 163, 164, 165, 166, 167, 168,
+ 169, 170, 171, 172, 173, 174, 175, 176, 177, 178,
+ 179, 180, 181, 182, 183, 184, 185, 186, 187, 188,
+ 189, 190, 191, 192, 193, 194, 195, 196, 197, 198,
+ 199, 200, 201, 202, 203, 204, 205, 206, 207, 208,
+ 209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
+ 219, 220, 221, 222, 223, 224, 225, 226, 227, 228,
+ 229, 230, 231, 232, 233, 234, 235, 236, 237, 238,
+ 239, 240, 241, 242, 243, 244, 245, 246, 247, 248,
+ 249, 250, 251, 252, 253, 254, 255, 256, 257, 258,
+ 259, 260, 261, 262, 263, 264, 265, 266, 267, 268,
+ 269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
+ 279, 280, 281, 282, 283, 284, 285, 286, 287, 288,
+ 289, 290, 291, 292, -1, -1, 295, 296, -1, 383,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 375, -1, -1, 378, -1, -1, -1, -1,
- -1, -1, -1, 386, 387, 388, 389, 5, 6, 7,
- 8, 9, -1, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 82, -1, -1, -1, -1, -1,
+ -1, -1, -1, 397, -1, 314, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 325, 326, 327, 328,
+ 414, 415, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 346, 347, 348,
+ 349, 350, 351, -1, -1, -1, -1, 356, 357, -1,
+ -1, 445, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, 456, -1, -1, -1, -1, 376, 377, 378,
+ 379, 380, 381, 382, 383, -1, -1, -1, -1, -1,
+ -1, 475, -1, -1, 393, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 326, 327,
- 328, 329, 330, 331, 332, 333, 334, 335, 336, 337,
- 338, -1, 340, 341, 342, 343, 344, 345, 346, 347,
- 348, 349, 350, 351, 352, 353, -1, -1, 356, 357,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 375, -1, -1,
- -1, 379, -1, -1, -1, -1, -1, -1, 386, 387,
- 388, 389, 5, 6, 7, 8, 9, -1, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 20, 21, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 34, 35, 36, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
- 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 82,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 98, 99, 100, 101, 102,
- 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
- 113, 114, 115, 116, 117, 118, 119, 120, 121, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
- 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
- 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
- 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
- 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
- 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
- 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
- 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
- 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
- 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
- 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
- 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
- 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
- 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
- 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
- 333, 334, 335, 336, 337, 338, -1, 340, 341, 342,
- 343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
- 353, -1, -1, 356, 357, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 375, -1, -1, 378, -1, -1, -1, -1,
- -1, -1, -1, 386, 387, 388, 389, 5, 6, 7,
- 8, 9, -1, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 82, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, 542, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 326, 327,
- 328, 329, 330, 331, 332, 333, 334, 335, 336, 337,
- 338, -1, 340, 341, 342, 343, 344, 345, 346, 347,
- 348, 349, 350, 351, 352, 353, -1, -1, 356, 357,
+ -1, 555, 556, 557, 558, 559, 560, 561, 562, 563,
+ 564, 565, 566, 567, 568, 569, 570, 571, 572, 573,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 375, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 385, 386, 387,
- 388, 389, 5, 6, 7, 8, 9, -1, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 20, 21, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 34, 35, 36, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
- 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, 82,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 98, 99, 100, 101, 102,
- 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
- 113, 114, 115, 116, 117, 118, 119, 120, 121, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
- 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
- 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
- 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
- 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
- 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
- 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
- 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
- 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
- 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
- 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
- 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
- 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
- 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
- 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
- 333, 334, 335, 336, 337, 338, -1, 340, 341, 342,
- 343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
- 353, -1, -1, 356, 357, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, 375, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 386, 387, 388, 389, 5, 6, 7,
- 8, 9, -1, 11, 12, 13, 14, 15, 16, 17,
- 18, 19, 20, 21, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
- 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
- 58, 59, 60, 61, 62, 63, 64, 65, 66, 67,
- 68, 69, 70, 71, 72, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, 82, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- 98, 99, 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 115, 116, 117,
- 118, 119, 120, 121, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, 131, 132, 133, 134, 135, 136, 137,
- 138, 139, 140, 141, 142, 143, 144, 145, 146, 147,
- 148, 149, 150, 151, 152, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 192, 193, 194, 195, 196, 197,
- 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
- 208, 209, 210, 211, 212, 213, 214, 215, 216, 217,
- 218, 219, 220, 221, 222, 223, 224, 225, 226, 227,
- 228, 229, 230, 231, 232, 233, 234, 235, 236, 237,
- 238, 239, 240, 241, 242, 243, 244, 245, 246, 247,
- 248, 249, 250, 251, 252, 253, 254, 255, 256, 257,
- 258, 259, 260, 261, 262, 263, 264, 265, 266, 267,
- 268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
- 278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
- 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
- 308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
- 318, 319, 320, 321, 322, 323, 324, 325, 326, 327,
- 328, 329, 330, 331, 332, 333, 334, 335, 336, 337,
- 338, -1, 340, 341, 342, 343, 344, 345, 346, 347,
- 348, 349, 350, 351, 352, 353, -1, -1, 356, 357,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, 375, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 386, 387,
- 388, 389, 5, 6, 7, 8, 9, -1, 11, 12,
- 13, 14, 15, 16, 17, 18, 19, 20, 21, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, 34, 35, 36, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
- 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, 98, 99, 100, 101, 102,
- 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
- 113, 114, 115, 116, 117, 118, 119, 120, 121, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
- 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
- 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
- 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
- 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
- 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
- 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
- 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
- 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
- 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
- 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
- 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
- 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
- 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
- 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
- 333, 334, 335, 336, 337, 338, -1, -1, 341
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, 673
};
/* YYSTOS[STATE-NUM] -- The (internal number of the) accessing
   symbol of state STATE-NUM.  */
@@ -3272,147 +3283,148 @@ static const yytype_uint16 yystos[] =
{
0, 3, 4, 5, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
- 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60, 61, 62,
- 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
- 73, 74, 75, 76, 77, 78, 79, 80, 81, 82,
- 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
- 93, 94, 95, 96, 97, 98, 99, 100, 101, 102,
- 103, 104, 105, 106, 107, 108, 109, 110, 111, 112,
- 113, 114, 115, 116, 117, 118, 119, 120, 121, 122,
- 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
- 133, 134, 135, 136, 137, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 150, 151, 152,
- 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180, 181, 182,
- 183, 184, 185, 186, 187, 188, 189, 190, 191, 192,
- 193, 194, 195, 196, 197, 198, 199, 200, 201, 202,
- 203, 204, 205, 206, 207, 208, 209, 210, 211, 212,
- 213, 214, 215, 216, 217, 218, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 229, 230, 231, 232,
- 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
- 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
- 253, 254, 255, 256, 257, 258, 259, 260, 261, 262,
- 263, 264, 265, 266, 267, 268, 269, 270, 271, 272,
- 273, 274, 275, 276, 277, 278, 279, 280, 281, 282,
- 283, 284, 285, 286, 287, 288, 289, 290, 291, 292,
- 293, 294, 295, 296, 297, 298, 299, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
- 313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
- 323, 324, 325, 326, 327, 328, 329, 330, 331, 332,
- 333, 334, 335, 336, 337, 338, 341, 385, 399, 400,
- 401, 402, 403, 404, 439, 440, 443, 444, 445, 446,
- 450, 451, 452, 453, 454, 455, 458, 459, 460, 461,
- 462, 464, 469, 470, 471, 511, 512, 513, 375, 375,
- 340, 379, 470, 340, 385, 385, 514, 376, 382, 447,
- 448, 449, 459, 464, 382, 385, 340, 340, 385, 460,
- 464, 393, 466, 467, 0, 512, 340, 463, 81, 340,
- 456, 457, 379, 473, 464, 377, 385, 465, 379, 490,
- 448, 447, 449, 340, 340, 375, 384, 465, 379, 382,
- 385, 442, 340, 342, 343, 344, 345, 346, 347, 348,
- 349, 350, 351, 352, 353, 356, 357, 375, 386, 387,
- 388, 389, 409, 410, 411, 413, 414, 415, 416, 417,
- 418, 419, 420, 421, 462, 464, 468, 465, 376, 382,
- 384, 376, 382, 472, 459, 464, 474, 475, 385, 378,
- 420, 422, 423, 424, 425, 426, 427, 428, 429, 430,
- 431, 432, 433, 377, 385, 22, 23, 24, 26, 27,
- 28, 29, 30, 31, 32, 339, 377, 379, 380, 385,
- 420, 433, 435, 437, 439, 443, 462, 464, 480, 481,
- 482, 483, 491, 492, 493, 494, 497, 498, 501, 502,
- 503, 510, 515, 465, 384, 465, 379, 435, 478, 384,
- 441, 340, 382, 385, 420, 420, 437, 356, 357, 377,
- 381, 376, 376, 382, 338, 435, 375, 420, 382, 394,
- 340, 433, 438, 457, 474, 464, 340, 476, 477, 380,
- 475, 390, 391, 392, 387, 389, 354, 355, 358, 359,
- 393, 394, 360, 361, 397, 396, 395, 362, 364, 363,
- 398, 378, 378, 433, 385, 385, 505, 375, 375, 385,
- 385, 437, 375, 437, 383, 375, 377, 380, 484, 365,
- 366, 367, 368, 369, 370, 371, 372, 373, 374, 384,
- 436, 382, 385, 380, 481, 494, 498, 503, 478, 384,
- 478, 479, 478, 474, 340, 376, 412, 437, 340, 435,
- 420, 380, 476, 465, 382, 385, 420, 420, 420, 422,
- 422, 423, 423, 424, 424, 424, 424, 425, 425, 426,
- 427, 428, 429, 430, 431, 434, 378, 481, 506, 437,
- 385, 437, 383, 504, 340, 516, 517, 491, 435, 435,
- 478, 380, 382, 380, 378, 385, 477, 437, 339, 480,
- 492, 507, 376, 376, 437, 452, 459, 496, 375, 378,
- 382, 485, 380, 478, 383, 375, 496, 508, 509, 487,
- 488, 489, 495, 499, 340, 376, 438, 378, 517, 380,
- 435, 437, 385, 376, 25, 483, 482, 379, 384, 482,
- 486, 490, 376, 376, 437, 486, 487, 491, 500, 478,
- 385, 380
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
+ 82, 83, 84, 85, 86, 87, 88, 89, 90, 91,
+ 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,
+ 102, 103, 104, 105, 106, 107, 108, 109, 110, 111,
+ 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131,
+ 132, 133, 134, 135, 136, 137, 138, 139, 140, 141,
+ 142, 143, 144, 145, 146, 147, 148, 149, 150, 151,
+ 152, 153, 154, 155, 156, 157, 158, 159, 160, 161,
+ 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 174, 175, 176, 177, 178, 179, 180, 181,
+ 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
+ 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+ 202, 203, 204, 205, 206, 207, 208, 209, 210, 211,
+ 212, 213, 214, 215, 216, 217, 218, 219, 220, 221,
+ 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241,
+ 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
+ 252, 253, 254, 255, 256, 257, 258, 259, 260, 261,
+ 262, 263, 264, 265, 266, 267, 268, 269, 270, 271,
+ 272, 273, 274, 275, 276, 277, 278, 279, 280, 281,
+ 282, 283, 284, 285, 286, 287, 288, 289, 290, 291,
+ 292, 324, 338, 339, 340, 341, 342, 351, 352, 353,
+ 354, 355, 356, 357, 370, 371, 372, 373, 374, 375,
+ 384, 386, 387, 388, 389, 390, 391, 392, 393, 394,
+ 395, 396, 397, 398, 399, 400, 401, 402, 403, 404,
+ 405, 406, 407, 408, 409, 410, 442, 443, 446, 447,
+ 448, 449, 453, 454, 455, 456, 457, 458, 461, 462,
+ 463, 464, 465, 467, 472, 473, 474, 515, 516, 517,
+ 473, 318, 350, 314, 314, 324, 350, 324, 518, 315,
+ 321, 450, 451, 452, 462, 467, 321, 324, 350, 324,
+ 350, 463, 467, 332, 469, 470, 0, 516, 467, 476,
+ 318, 350, 371, 459, 460, 350, 466, 316, 324, 468,
+ 318, 494, 451, 450, 452, 350, 350, 314, 323, 468,
+ 318, 321, 324, 445, 295, 296, 314, 325, 326, 327,
+ 328, 346, 347, 348, 349, 350, 376, 377, 378, 379,
+ 380, 381, 382, 383, 412, 413, 414, 416, 417, 418,
+ 419, 420, 421, 422, 423, 424, 465, 467, 471, 468,
+ 324, 462, 467, 477, 478, 475, 323, 315, 321, 315,
+ 321, 317, 423, 425, 426, 427, 428, 429, 430, 431,
+ 432, 433, 434, 435, 436, 316, 324, 316, 318, 319,
+ 324, 358, 359, 360, 361, 363, 364, 365, 366, 367,
+ 368, 369, 385, 423, 436, 438, 440, 442, 446, 465,
+ 467, 483, 484, 485, 486, 487, 495, 496, 497, 498,
+ 501, 502, 505, 506, 507, 514, 519, 468, 323, 468,
+ 318, 438, 481, 323, 444, 350, 321, 324, 423, 423,
+ 440, 295, 296, 316, 320, 315, 315, 321, 357, 438,
+ 314, 423, 321, 333, 467, 350, 479, 480, 319, 478,
+ 477, 436, 441, 460, 350, 329, 330, 331, 326, 328,
+ 293, 294, 297, 298, 332, 333, 299, 300, 336, 335,
+ 334, 301, 303, 302, 337, 317, 317, 436, 316, 319,
+ 488, 314, 324, 324, 509, 314, 314, 324, 324, 440,
+ 314, 440, 322, 324, 304, 305, 306, 307, 308, 309,
+ 310, 311, 312, 313, 323, 439, 321, 324, 319, 484,
+ 498, 502, 507, 481, 323, 481, 482, 481, 477, 350,
+ 315, 415, 440, 350, 438, 423, 479, 468, 321, 324,
+ 319, 423, 423, 423, 425, 425, 426, 426, 427, 427,
+ 427, 427, 428, 428, 429, 430, 431, 432, 433, 434,
+ 437, 317, 350, 520, 521, 495, 508, 484, 510, 440,
+ 324, 440, 322, 438, 438, 481, 319, 321, 319, 317,
+ 324, 480, 440, 314, 317, 321, 489, 440, 455, 462,
+ 500, 358, 483, 496, 511, 315, 315, 319, 481, 322,
+ 441, 317, 521, 319, 350, 315, 314, 500, 512, 513,
+ 491, 492, 493, 499, 503, 438, 315, 323, 485, 490,
+ 494, 440, 324, 315, 362, 487, 485, 318, 481, 315,
+ 440, 490, 491, 495, 504, 324, 319
};
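
The yystos table above maps each parser state to the grammar symbol whose shift or goto entered it. A condensed sketch of how the yacc.c skeleton consults it when unwinding the stacks (variable names such as yyssp, yyss and yyvsp are the skeleton's own, shown here for orientation only; this is a paraphrase, not a hunk from this diff):

    /* Sketch: freeing semantic values while popping states during
       cleanup; yyssp points at the top of the state stack. */
    while (yyssp != yyss)
      {
        yydestruct ("Cleanup: popping", yystos[*yyssp], yyvsp);
        YYPOPSTACK (1);
      }
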
/* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */
static const yytype_uint16 yyr1[] =
{
- 0, 408, 409, 410, 410, 410, 410, 410, 410, 410,
- 410, 410, 410, 410, 410, 410, 410, 411, 411, 411,
- 411, 411, 411, 412, 413, 414, 415, 415, 416, 416,
- 417, 417, 418, 419, 419, 419, 420, 420, 420, 420,
- 421, 421, 421, 421, 422, 422, 422, 422, 423, 423,
- 423, 424, 424, 424, 425, 425, 425, 425, 425, 426,
- 426, 426, 427, 427, 428, 428, 429, 429, 430, 430,
- 431, 431, 432, 432, 433, 434, 433, 435, 435, 436,
- 436, 436, 436, 436, 436, 436, 436, 436, 436, 436,
- 437, 437, 438, 439, 439, 439, 439, 439, 439, 439,
- 439, 439, 441, 440, 442, 442, 443, 444, 444, 445,
- 445, 446, 447, 447, 448, 448, 448, 448, 449, 450,
- 450, 450, 450, 450, 451, 451, 451, 451, 451, 452,
- 452, 453, 454, 454, 454, 454, 454, 454, 454, 454,
- 455, 456, 456, 457, 457, 457, 458, 459, 459, 460,
- 460, 460, 460, 460, 460, 460, 461, 461, 461, 461,
- 461, 461, 461, 461, 461, 461, 461, 461, 461, 461,
- 461, 461, 461, 461, 461, 461, 461, 461, 461, 461,
- 461, 461, 461, 461, 461, 462, 463, 463, 464, 464,
- 465, 465, 465, 465, 466, 466, 467, 468, 468, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 469, 469, 469,
- 469, 469, 469, 469, 469, 469, 469, 470, 470, 470,
- 472, 471, 473, 471, 474, 474, 475, 475, 476, 476,
- 477, 477, 478, 478, 478, 479, 479, 480, 481, 481,
- 482, 482, 482, 482, 482, 482, 482, 483, 484, 485,
- 483, 486, 486, 488, 487, 489, 487, 490, 490, 491,
- 491, 492, 492, 493, 493, 494, 495, 495, 496, 496,
- 497, 497, 499, 498, 500, 500, 501, 501, 502, 502,
- 504, 503, 505, 503, 506, 503, 507, 507, 508, 508,
- 509, 509, 510, 510, 510, 510, 510, 511, 511, 512,
- 512, 512, 514, 513, 515, 516, 516, 517, 517
+ 0, 411, 412, 413, 413, 413, 413, 413, 413, 413,
+ 413, 413, 413, 413, 413, 413, 413, 414, 414, 414,
+ 414, 414, 414, 415, 416, 417, 418, 418, 419, 419,
+ 420, 420, 421, 422, 422, 422, 423, 423, 423, 423,
+ 424, 424, 424, 424, 425, 425, 425, 425, 426, 426,
+ 426, 427, 427, 427, 428, 428, 428, 428, 428, 429,
+ 429, 429, 430, 430, 431, 431, 432, 432, 433, 433,
+ 434, 434, 435, 435, 436, 437, 436, 438, 438, 439,
+ 439, 439, 439, 439, 439, 439, 439, 439, 439, 439,
+ 440, 440, 441, 442, 442, 442, 442, 442, 442, 442,
+ 442, 442, 444, 443, 445, 445, 446, 447, 447, 448,
+ 448, 449, 450, 450, 451, 451, 451, 451, 452, 453,
+ 453, 453, 453, 453, 454, 454, 454, 454, 454, 455,
+ 455, 456, 457, 457, 457, 457, 457, 457, 457, 457,
+ 458, 459, 459, 460, 460, 460, 461, 462, 462, 463,
+ 463, 463, 463, 463, 463, 463, 464, 464, 464, 464,
+ 464, 464, 464, 464, 464, 464, 464, 464, 464, 464,
+ 464, 464, 464, 464, 464, 464, 464, 464, 464, 464,
+ 464, 464, 464, 464, 464, 465, 466, 466, 467, 467,
+ 468, 468, 468, 468, 469, 469, 470, 471, 471, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 472,
+ 472, 472, 472, 472, 472, 472, 472, 472, 472, 473,
+ 473, 473, 475, 474, 476, 474, 477, 477, 478, 478,
+ 479, 479, 480, 480, 481, 481, 481, 482, 482, 483,
+ 484, 484, 485, 485, 485, 485, 485, 485, 485, 485,
+ 486, 487, 488, 489, 487, 490, 490, 492, 491, 493,
+ 491, 494, 494, 495, 495, 496, 496, 497, 497, 498,
+ 499, 499, 500, 500, 501, 501, 503, 502, 504, 504,
+ 505, 505, 506, 506, 508, 507, 509, 507, 510, 507,
+ 511, 511, 512, 512, 513, 513, 514, 514, 514, 514,
+ 514, 515, 515, 516, 516, 516, 518, 517, 519, 520,
+ 520, 521, 521
};
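
yyr1 is read once per reduction to find the nonterminal the rule derives, which then selects the goto transition. A sketch of the skeleton's goto computation, paraphrased from the yacc.c template (yypgoto, yydefgoto, yytable and yycheck are companion tables generated elsewhere in this file):

    /* Sketch: after reducing by rule yyn, compute the next state. */
    yyn = yyr1[yyn];                           /* left-hand-side symbol */
    yystate = yypgoto[yyn - YYNTOKENS] + *yyssp;
    if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp)
      yystate = yytable[yystate];
    else
      yystate = yydefgoto[yyn - YYNTOKENS];    /* default goto */
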
/* YYR2[YYN] -- Number of symbols on the right hand side of rule YYN. */
static const yytype_uint8 yyr2[] =
{
- 0, 2, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 3, 1, 4, 1,
+ 0, 2, 1, 1, 3, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 4, 1,
3, 2, 2, 1, 1, 1, 2, 2, 2, 1,
2, 3, 2, 1, 1, 1, 1, 2, 2, 2,
1, 1, 1, 1, 1, 3, 3, 3, 1, 3,
@@ -3460,15 +3472,16 @@ static const yytype_uint8 yyr2[] =
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 0, 6, 0, 5, 1, 2, 3, 4, 1, 3,
- 1, 2, 1, 3, 4, 1, 3, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 2, 0, 0,
- 5, 1, 1, 0, 2, 0, 2, 2, 3, 1,
- 2, 1, 2, 1, 2, 5, 3, 1, 1, 4,
- 1, 2, 0, 8, 0, 1, 3, 2, 1, 2,
- 0, 6, 0, 8, 0, 7, 1, 1, 1, 0,
- 2, 3, 2, 2, 2, 3, 2, 1, 2, 1,
- 1, 1, 0, 3, 5, 1, 3, 1, 4
+ 1, 1, 0, 6, 0, 5, 1, 2, 3, 4,
+ 1, 3, 1, 2, 1, 3, 4, 1, 3, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 2, 2, 0, 0, 5, 1, 1, 0, 2, 0,
+ 2, 2, 3, 1, 2, 1, 2, 1, 2, 5,
+ 3, 1, 1, 4, 1, 2, 0, 8, 0, 1,
+ 3, 2, 1, 2, 0, 6, 0, 8, 0, 7,
+ 1, 1, 1, 0, 2, 3, 2, 2, 2, 3,
+ 2, 1, 2, 1, 1, 1, 0, 3, 5, 1,
+ 3, 1, 4
};
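
yyr2 gives the length of each rule's right-hand side; the skeleton pops that many entries from both stacks before pushing the reduction's result. A paraphrased sketch of the entry into yyreduce (the switch it refers to is the generated action dispatch in the hunks below):

    /* Sketch: reducing by rule yyn. */
    yylen = yyr2[yyn];       /* number of RHS symbols to pop */
    yyval = yyvsp[1-yylen];  /* default action: $$ = $1 */
    /* ... run the rule's semantic action (the switch below) ... */
    YYPOPSTACK (yylen);      /* pop states and semantic values */
    *++yyvsp = yyval;        /* push the rule's result */
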
@@ -4151,250 +4164,252 @@ yyreduce:
switch (yyn)
{
case 2:
-#line 302 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 352 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleVariable((yyvsp[0].lex).loc, (yyvsp[0].lex).symbol, (yyvsp[0].lex).string);
}
-#line 4159 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4172 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 3:
-#line 308 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 358 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 4167 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4180 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 4:
-#line 311 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 361 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed literal");
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
+ (yyval.interm.intermTypedNode) = (yyvsp[-1].interm.intermTypedNode);
+ if ((yyval.interm.intermTypedNode)->getAsConstantUnion())
+ (yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression();
}
-#line 4176 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4190 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 5:
-#line 315 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 366 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed literal");
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat, (yyvsp[0].lex).loc, true);
}
-#line 4185 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4198 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 6:
-#line 319 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 369 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
}
-#line 4193 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4206 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 7:
-#line 322 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 372 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned literal");
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
}
-#line 4202 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4215 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 8:
-#line 326 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 376 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer literal");
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i64, (yyvsp[0].lex).loc, true);
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).b, (yyvsp[0].lex).loc, true);
}
-#line 4211 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4223 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 9:
-#line 330 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 380 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer literal");
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u64, (yyvsp[0].lex).loc, true);
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed literal");
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
}
-#line 4220 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4232 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 10:
-#line 334 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 384 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt16Check((yyvsp[0].lex).loc, "16-bit integer literal");
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((short)(yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed literal");
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
}
-#line 4229 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4241 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 11:
-#line 338 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 388 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt16Check((yyvsp[0].lex).loc, "16-bit unsigned integer literal");
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((unsigned short)(yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer literal");
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).i64, (yyvsp[0].lex).loc, true);
}
-#line 4238 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4250 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 12:
-#line 342 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 392 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat, (yyvsp[0].lex).loc, true);
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer literal");
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).u64, (yyvsp[0].lex).loc, true);
}
-#line 4246 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4259 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 13:
-#line 345 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 396 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double literal");
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtDouble, (yyvsp[0].lex).loc, true);
+ parseContext.explicitInt16Check((yyvsp[0].lex).loc, "16-bit integer literal");
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((short)(yyvsp[0].lex).i, (yyvsp[0].lex).loc, true);
}
-#line 4255 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4268 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 14:
-#line 349 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 400 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.float16Check((yyvsp[0].lex).loc, "half float literal");
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat16, (yyvsp[0].lex).loc, true);
+ parseContext.explicitInt16Check((yyvsp[0].lex).loc, "16-bit unsigned integer literal");
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((unsigned short)(yyvsp[0].lex).u, (yyvsp[0].lex).loc, true);
}
-#line 4264 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4277 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 15:
-#line 353 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 404 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).b, (yyvsp[0].lex).loc, true);
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double literal");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double literal");
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtDouble, (yyvsp[0].lex).loc, true);
}
-#line 4272 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4286 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 16:
-#line 356 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 408 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- (yyval.interm.intermTypedNode) = (yyvsp[-1].interm.intermTypedNode);
- if ((yyval.interm.intermTypedNode)->getAsConstantUnion())
- (yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression();
+ parseContext.float16Check((yyvsp[0].lex).loc, "half float literal");
+ (yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion((yyvsp[0].lex).d, EbtFloat16, (yyvsp[0].lex).loc, true);
}
-#line 4282 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4295 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 17:
-#line 364 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 416 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 4290 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4303 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 18:
-#line 367 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 419 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBracketDereference((yyvsp[-2].lex).loc, (yyvsp[-3].interm.intermTypedNode), (yyvsp[-1].interm.intermTypedNode));
}
-#line 4298 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4311 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 19:
-#line 370 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 422 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 4306 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4319 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 20:
-#line 373 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 425 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleDotDereference((yyvsp[0].lex).loc, (yyvsp[-2].interm.intermTypedNode), *(yyvsp[0].lex).string);
}
-#line 4314 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4327 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 21:
-#line 376 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 428 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.variableCheck((yyvsp[-1].interm.intermTypedNode));
parseContext.lValueErrorCheck((yyvsp[0].lex).loc, "++", (yyvsp[-1].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[0].lex).loc, "++", EOpPostIncrement, (yyvsp[-1].interm.intermTypedNode));
}
-#line 4324 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4337 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 22:
-#line 381 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 433 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.variableCheck((yyvsp[-1].interm.intermTypedNode));
parseContext.lValueErrorCheck((yyvsp[0].lex).loc, "--", (yyvsp[-1].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[0].lex).loc, "--", EOpPostDecrement, (yyvsp[-1].interm.intermTypedNode));
}
-#line 4334 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4347 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 23:
-#line 389 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 441 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.integerCheck((yyvsp[0].interm.intermTypedNode), "[]");
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 4343 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4356 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 24:
-#line 396 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 448 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleFunctionCall((yyvsp[0].interm).loc, (yyvsp[0].interm).function, (yyvsp[0].interm).intermNode);
delete (yyvsp[0].interm).function;
}
-#line 4352 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4365 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 25:
-#line 403 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 455 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[0].interm);
}
-#line 4360 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4373 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 26:
-#line 409 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 461 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[-1].interm);
(yyval.interm).loc = (yyvsp[0].lex).loc;
}
-#line 4369 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4382 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 27:
-#line 413 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 465 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[-1].interm);
(yyval.interm).loc = (yyvsp[0].lex).loc;
}
-#line 4378 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4391 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 28:
-#line 420 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 472 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[-1].interm);
}
-#line 4386 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4399 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 29:
-#line 423 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 475 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[0].interm);
}
-#line 4394 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4407 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 30:
-#line 429 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 481 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
TParameter param = { 0, new TType };
param.type->shallowCopy((yyvsp[0].interm.intermTypedNode)->getType());
@@ -4402,11 +4417,11 @@ yyreduce:
(yyval.interm).function = (yyvsp[-1].interm).function;
(yyval.interm).intermNode = (yyvsp[0].interm.intermTypedNode);
}
-#line 4406 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4419 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 31:
-#line 436 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 488 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
TParameter param = { 0, new TType };
param.type->shallowCopy((yyvsp[0].interm.intermTypedNode)->getType());
@@ -4414,29 +4429,29 @@ yyreduce:
(yyval.interm).function = (yyvsp[-2].interm).function;
(yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-2].interm).intermNode, (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].lex).loc);
}
-#line 4418 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4431 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 32:
-#line 446 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 498 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[-1].interm);
}
-#line 4426 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4439 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 33:
-#line 454 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 506 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// Constructor
(yyval.interm).intermNode = 0;
(yyval.interm).function = parseContext.handleConstructorCall((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type));
}
-#line 4436 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4449 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 34:
-#line 459 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 511 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
//
// Should be a method or subroutine call, but we haven't recognized the arguments yet.
@@ -4464,50 +4479,50 @@ yyreduce:
(yyval.interm).function = new TFunction(empty, TType(EbtVoid), EOpNull);
}
}
-#line 4468 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4481 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 35:
-#line 486 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 539 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// Constructor
(yyval.interm).intermNode = 0;
(yyval.interm).function = parseContext.handleConstructorCall((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type));
}
-#line 4478 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4491 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 36:
-#line 494 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 548 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.variableCheck((yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
if (TIntermMethod* method = (yyvsp[0].interm.intermTypedNode)->getAsMethodNode())
parseContext.error((yyvsp[0].interm.intermTypedNode)->getLoc(), "incomplete method syntax", method->getMethodName().c_str(), "");
}
-#line 4489 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4502 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 37:
-#line 500 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 554 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.lValueErrorCheck((yyvsp[-1].lex).loc, "++", (yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].lex).loc, "++", EOpPreIncrement, (yyvsp[0].interm.intermTypedNode));
}
-#line 4498 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4511 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 38:
-#line 504 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 558 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.lValueErrorCheck((yyvsp[-1].lex).loc, "--", (yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.handleUnaryMath((yyvsp[-1].lex).loc, "--", EOpPreDecrement, (yyvsp[0].interm.intermTypedNode));
}
-#line 4507 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4520 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 39:
-#line 508 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 562 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-1].interm).op != EOpNull) {
char errorOp[2] = {0, 0};
@@ -4524,179 +4539,179 @@ yyreduce:
(yyval.interm.intermTypedNode)->getAsConstantUnion()->setExpression();
}
}
-#line 4528 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4541 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 40:
-#line 528 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 582 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNull; }
-#line 4534 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4547 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 41:
-#line 529 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 583 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpNegative; }
-#line 4540 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4553 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 42:
-#line 530 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 584 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLogicalNot; }
-#line 4546 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4559 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 43:
-#line 531 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 585 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpBitwiseNot;
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise not"); }
-#line 4553 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4566 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 44:
-#line 537 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 591 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4559 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4572 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 45:
-#line 538 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 592 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "*", EOpMul, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4569 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4582 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 46:
-#line 543 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 597 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "/", EOpDiv, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4579 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4592 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 47:
-#line 548 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 602 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "%");
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "%", EOpMod, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4590 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4603 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 48:
-#line 557 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 611 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4596 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4609 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 49:
-#line 558 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 612 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "+", EOpAdd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4606 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4619 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 50:
-#line 563 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 617 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "-", EOpSub, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4616 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4629 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 51:
-#line 571 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 625 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4622 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4635 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 52:
-#line 572 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 626 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bit shift left");
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<<", EOpLeftShift, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4633 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4646 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 53:
-#line 578 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 632 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bit shift right");
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">>", EOpRightShift, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4644 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4657 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 54:
-#line 587 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 641 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4650 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4663 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 55:
-#line 588 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 642 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<", EOpLessThan, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4660 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4673 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 56:
-#line 593 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 647 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">", EOpGreaterThan, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4670 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4683 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 57:
-#line 598 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 652 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "<=", EOpLessThanEqual, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4680 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4693 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 58:
-#line 603 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 657 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, ">=", EOpGreaterThanEqual, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4690 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4703 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 59:
-#line 611 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 665 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4696 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4709 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 60:
-#line 612 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 666 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.arrayObjectCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "array comparison");
parseContext.opaqueCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "==");
@@ -4706,11 +4721,11 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4710 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4723 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 61:
-#line 621 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 675 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.arrayObjectCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "array comparison");
parseContext.opaqueCheck((yyvsp[-1].lex).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "!=");
@@ -4720,124 +4735,124 @@ yyreduce:
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4724 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4737 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 62:
-#line 633 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 687 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4730 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4743 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 63:
-#line 634 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 688 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise and");
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "&", EOpAnd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4741 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4754 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 64:
-#line 643 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 697 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4747 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4760 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 65:
-#line 644 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 698 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise exclusive or");
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "^", EOpExclusiveOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4758 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4771 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 66:
-#line 653 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 707 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4764 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4777 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 67:
-#line 654 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 708 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[-1].lex).loc, "bitwise inclusive or");
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "|", EOpInclusiveOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 4775 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 68:
-#line 663 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 717 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4781 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4794 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 69:
-#line 664 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 718 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "&&", EOpLogicalAnd, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4791 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4804 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 70:
-#line 672 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 726 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4797 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4810 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 71:
-#line 673 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 727 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "^^", EOpLogicalXor, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4807 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4820 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
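      // A minimal GLSL fragment exercising the reduction above; "^^" is GLSL's
      // logical exclusive-or on scalar booleans (names a, b, c are illustrative only):
      //     bool a = x > 0.0, b = y > 0.0;
      //     bool c = a ^^ b;   // true when exactly one operand is true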
case 72:
-#line 681 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 735 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4813 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4826 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 73:
-#line 682 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 736 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.handleBinaryMath((yyvsp[-1].lex).loc, "||", EOpLogicalOr, (yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
if ((yyval.interm.intermTypedNode) == 0)
(yyval.interm.intermTypedNode) = parseContext.intermediate.addConstantUnion(false, (yyvsp[-1].lex).loc);
}
-#line 4823 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4836 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 74:
-#line 690 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 744 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4829 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4842 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 75:
-#line 691 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 745 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
++parseContext.controlFlowNestingLevel;
}
-#line 4837 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4850 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 76:
-#line 694 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 748 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
--parseContext.controlFlowNestingLevel;
parseContext.boolCheck((yyvsp[-4].lex).loc, (yyvsp[-5].interm.intermTypedNode));
@@ -4850,17 +4865,17 @@ yyreduce:
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
}
-#line 4854 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4867 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
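      // The rule above parses the conditional operator; boolCheck enforces that
      // the condition is a scalar bool. An illustrative GLSL use (variable names
      // are placeholders):
      //     float s = (x >= 0.0) ? 1.0 : -1.0;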
case 77:
-#line 709 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 763 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode); }
-#line 4860 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4873 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 78:
-#line 710 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 764 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.arrayObjectCheck((yyvsp[-1].interm).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "array assignment");
parseContext.opaqueCheck((yyvsp[-1].interm).loc, (yyvsp[-2].interm.intermTypedNode)->getType(), "=");
@@ -4874,119 +4889,119 @@ yyreduce:
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
}
-#line 4878 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4891 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 79:
-#line 726 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 780 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpAssign;
}
-#line 4887 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4900 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 80:
-#line 730 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 784 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpMulAssign;
}
-#line 4896 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4909 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 81:
-#line 734 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 788 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpDivAssign;
}
-#line 4905 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4918 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 82:
-#line 738 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 792 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "%=");
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpModAssign;
}
-#line 4915 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4928 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 83:
-#line 743 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 797 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpAddAssign;
}
-#line 4924 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4937 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 84:
-#line 747 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 801 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).op = EOpSubAssign;
}
-#line 4933 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 85:
-#line 751 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 805 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bit-shift left assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpLeftShiftAssign;
}
-#line 4942 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4955 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 86:
-#line 755 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 809 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bit-shift right assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpRightShiftAssign;
}
-#line 4951 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4964 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 87:
-#line 759 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 813 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-and assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpAndAssign;
}
-#line 4960 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4973 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 88:
-#line 763 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 817 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-xor assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpExclusiveOrAssign;
}
-#line 4969 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4982 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 89:
-#line 767 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 821 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "bitwise-or assign");
(yyval.interm).loc = (yyvsp[0].lex).loc; (yyval.interm).op = EOpInclusiveOrAssign;
}
-#line 4978 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4991 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 90:
-#line 774 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 828 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 4986 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 4999 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 91:
-#line 777 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 831 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.samplerConstructorLocationCheck((yyvsp[-1].lex).loc, ",", (yyvsp[0].interm.intermTypedNode));
(yyval.interm.intermTypedNode) = parseContext.intermediate.addComma((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode), (yyvsp[-1].lex).loc);
@@ -4995,117 +5010,116 @@ yyreduce:
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
}
-#line 4999 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5012 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 92:
-#line 788 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 842 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.constantValueCheck((yyvsp[0].interm.intermTypedNode), "");
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 5008 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5021 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 93:
-#line 795 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 849 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.handleFunctionDeclarator((yyvsp[-1].interm).loc, *(yyvsp[-1].interm).function, true /* prototype */);
(yyval.interm.intermNode) = 0;
// TODO: 4.0 functionality: subroutines: make the identifier a user type for this signature
}
-#line 5018 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5031 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 94:
-#line 800 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 854 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-1].interm).intermNode && (yyvsp[-1].interm).intermNode->getAsAggregate())
(yyvsp[-1].interm).intermNode->getAsAggregate()->setOperator(EOpSequence);
(yyval.interm.intermNode) = (yyvsp[-1].interm).intermNode;
}
-#line 5028 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5041 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 95:
-#line 805 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 859 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.profileRequires((yyvsp[-3].lex).loc, ENoProfile, 130, 0, "precision statement");
-
// lazy setting of the previous scope's defaults, has effect only the first time it is called in a particular scope
parseContext.symbolTable.setPreviousDefaultPrecisions(&parseContext.defaultPrecision[0]);
parseContext.setDefaultPrecision((yyvsp[-3].lex).loc, (yyvsp[-1].interm.type), (yyvsp[-2].interm.type).qualifier.precision);
(yyval.interm.intermNode) = 0;
}
-#line 5041 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5053 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
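      // This reduction handles GLSL default-precision statements, e.g.
      // (representative examples, not taken from any particular shader):
      //     precision mediump float;   // default precision for floats
      //     precision highp int;       // default precision for ints
      // setPreviousDefaultPrecisions snapshots the enclosing scope's defaults the
      // first time a precision statement appears in a given scope.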
case 96:
-#line 813 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 866 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.declareBlock((yyvsp[-1].interm).loc, *(yyvsp[-1].interm).typeList);
(yyval.interm.intermNode) = 0;
}
-#line 5050 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5062 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 97:
-#line 817 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 870 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.declareBlock((yyvsp[-2].interm).loc, *(yyvsp[-2].interm).typeList, (yyvsp[-1].lex).string);
(yyval.interm.intermNode) = 0;
}
-#line 5059 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5071 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 98:
-#line 821 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 874 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.declareBlock((yyvsp[-3].interm).loc, *(yyvsp[-3].interm).typeList, (yyvsp[-2].lex).string, (yyvsp[-1].interm).arraySizes);
(yyval.interm.intermNode) = 0;
}
-#line 5068 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5080 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 99:
-#line 825 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 878 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.globalQualifierFixCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier);
parseContext.updateStandaloneQualifierDefaults((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type));
(yyval.interm.intermNode) = 0;
}
-#line 5078 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5090 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 100:
-#line 830 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 883 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.checkNoShaderLayouts((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).shaderQualifiers);
parseContext.addQualifierToExisting((yyvsp[-2].interm.type).loc, (yyvsp[-2].interm.type).qualifier, *(yyvsp[-1].lex).string);
(yyval.interm.intermNode) = 0;
}
-#line 5088 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5100 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 101:
-#line 835 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 888 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.checkNoShaderLayouts((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).shaderQualifiers);
(yyvsp[-1].interm.identifierList)->push_back((yyvsp[-2].lex).string);
parseContext.addQualifierToExisting((yyvsp[-3].interm.type).loc, (yyvsp[-3].interm.type).qualifier, *(yyvsp[-1].interm.identifierList));
(yyval.interm.intermNode) = 0;
}
-#line 5099 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5111 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 102:
-#line 844 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 897 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ parseContext.nestedBlockCheck((yyvsp[-2].interm.type).loc); }
-#line 5105 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5117 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 103:
-#line 844 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 897 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
--parseContext.structNestingLevel;
parseContext.blockName = (yyvsp[-4].lex).string;
@@ -5115,54 +5129,54 @@ yyreduce:
(yyval.interm).loc = (yyvsp[-5].interm.type).loc;
(yyval.interm).typeList = (yyvsp[-1].interm.typeList);
}
-#line 5119 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5131 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 104:
-#line 855 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 908 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.identifierList) = new TIdentifierList;
(yyval.interm.identifierList)->push_back((yyvsp[0].lex).string);
}
-#line 5128 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5140 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 105:
-#line 859 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 912 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.identifierList) = (yyvsp[-2].interm.identifierList);
(yyval.interm.identifierList)->push_back((yyvsp[0].lex).string);
}
-#line 5137 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5149 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 106:
-#line 866 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 919 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).function = (yyvsp[-1].interm.function);
(yyval.interm).loc = (yyvsp[0].lex).loc;
}
-#line 5146 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5158 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 107:
-#line 873 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 926 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.function) = (yyvsp[0].interm.function);
}
-#line 5154 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5166 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 108:
-#line 876 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 929 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.function) = (yyvsp[0].interm.function);
}
-#line 5162 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5174 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 109:
-#line 883 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 936 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// Add the parameter
(yyval.interm.function) = (yyvsp[-1].interm.function);
@@ -5171,11 +5185,11 @@ yyreduce:
else
delete (yyvsp[0].interm).param.type;
}
-#line 5175 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5187 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 110:
-#line 891 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 944 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
//
// Only first parameter of one-parameter functions can be void
@@ -5193,11 +5207,11 @@ yyreduce:
(yyvsp[-2].interm.function)->addParameter((yyvsp[0].interm).param);
}
}
-#line 5197 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5209 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
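      // Illustrative GLSL signatures for the void-parameter rule above (function
      // names are placeholders): "void f(void);" is legal because void may only
      // appear as the sole parameter, while "void g(int a, void);" is rejected
      // by this reduction.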
case 111:
-#line 911 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 964 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-2].interm.type).qualifier.storage != EvqGlobal && (yyvsp[-2].interm.type).qualifier.storage != EvqTemporary) {
parseContext.error((yyvsp[-1].lex).loc, "no qualifiers allowed for function return",
@@ -5217,11 +5231,11 @@ yyreduce:
function = new TFunction((yyvsp[-1].lex).string, type);
(yyval.interm.function) = function;
}
-#line 5221 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5233 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 112:
-#line 934 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 987 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-1].interm.type).arraySizes) {
parseContext.profileRequires((yyvsp[-1].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
@@ -5237,11 +5251,11 @@ yyreduce:
(yyval.interm).loc = (yyvsp[0].lex).loc;
(yyval.interm).param = param;
}
-#line 5241 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5253 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 113:
-#line 949 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1002 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-2].interm.type).arraySizes) {
parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
@@ -5261,11 +5275,11 @@ yyreduce:
(yyval.interm).loc = (yyvsp[-1].lex).loc;
(yyval.interm).param = param;
}
-#line 5265 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5277 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 114:
-#line 974 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1027 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[0].interm);
if ((yyvsp[-1].interm.type).qualifier.precision != EpqNone)
@@ -5277,11 +5291,11 @@ yyreduce:
parseContext.paramCheckFix((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, *(yyval.interm).param.type);
}
-#line 5281 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5293 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 115:
-#line 985 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1038 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[0].interm);
@@ -5289,11 +5303,11 @@ yyreduce:
parseContext.paramCheckFixStorage((yyvsp[0].interm).loc, EvqTemporary, *(yyval.interm).param.type);
parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier());
}
-#line 5293 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5305 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 116:
-#line 995 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1048 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[0].interm);
if ((yyvsp[-1].interm.type).qualifier.precision != EpqNone)
@@ -5304,11 +5318,11 @@ yyreduce:
parseContext.parameterTypeCheck((yyvsp[0].interm).loc, (yyvsp[-1].interm.type).qualifier.storage, *(yyval.interm).param.type);
parseContext.paramCheckFix((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, *(yyval.interm).param.type);
}
-#line 5308 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5320 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 117:
-#line 1005 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1058 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[0].interm);
@@ -5316,118 +5330,120 @@ yyreduce:
parseContext.paramCheckFixStorage((yyvsp[0].interm).loc, EvqTemporary, *(yyval.interm).param.type);
parseContext.precisionQualifierCheck((yyval.interm).loc, (yyval.interm).param.type->getBasicType(), (yyval.interm).param.type->getQualifier());
}
-#line 5320 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5332 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 118:
-#line 1015 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1068 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
TParameter param = { 0, new TType((yyvsp[0].interm.type)) };
(yyval.interm).param = param;
if ((yyvsp[0].interm.type).arraySizes)
parseContext.arraySizeRequiredCheck((yyvsp[0].interm.type).loc, *(yyvsp[0].interm.type).arraySizes);
}
-#line 5331 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5343 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 119:
-#line 1024 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1077 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[0].interm);
}
-#line 5339 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5351 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 120:
-#line 1027 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1080 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[-2].interm);
parseContext.declareVariable((yyvsp[0].lex).loc, *(yyvsp[0].lex).string, (yyvsp[-2].interm).type);
}
-#line 5348 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5360 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 121:
-#line 1031 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1084 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[-3].interm);
parseContext.declareVariable((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string, (yyvsp[-3].interm).type, (yyvsp[0].interm).arraySizes);
}
-#line 5357 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5369 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 122:
-#line 1035 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1088 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).type = (yyvsp[-5].interm).type;
TIntermNode* initNode = parseContext.declareVariable((yyvsp[-3].lex).loc, *(yyvsp[-3].lex).string, (yyvsp[-5].interm).type, (yyvsp[-2].interm).arraySizes, (yyvsp[0].interm.intermTypedNode));
(yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-5].interm).intermNode, initNode, (yyvsp[-1].lex).loc);
}
-#line 5367 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5379 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 123:
-#line 1040 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1093 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).type = (yyvsp[-4].interm).type;
TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-4].interm).type, 0, (yyvsp[0].interm.intermTypedNode));
(yyval.interm).intermNode = parseContext.intermediate.growAggregate((yyvsp[-4].interm).intermNode, initNode, (yyvsp[-1].lex).loc);
}
-#line 5377 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5389 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 124:
-#line 1048 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1101 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).type = (yyvsp[0].interm.type);
(yyval.interm).intermNode = 0;
+
parseContext.declareTypeDefaults((yyval.interm).loc, (yyval.interm).type);
+
}
-#line 5387 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5401 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 125:
-#line 1053 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1108 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).type = (yyvsp[-1].interm.type);
(yyval.interm).intermNode = 0;
parseContext.declareVariable((yyvsp[0].lex).loc, *(yyvsp[0].lex).string, (yyvsp[-1].interm.type));
}
-#line 5397 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5411 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 126:
-#line 1058 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1113 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).type = (yyvsp[-2].interm.type);
(yyval.interm).intermNode = 0;
parseContext.declareVariable((yyvsp[-1].lex).loc, *(yyvsp[-1].lex).string, (yyvsp[-2].interm.type), (yyvsp[0].interm).arraySizes);
}
-#line 5407 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5421 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 127:
-#line 1063 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1118 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).type = (yyvsp[-4].interm.type);
TIntermNode* initNode = parseContext.declareVariable((yyvsp[-3].lex).loc, *(yyvsp[-3].lex).string, (yyvsp[-4].interm.type), (yyvsp[-2].interm).arraySizes, (yyvsp[0].interm.intermTypedNode));
(yyval.interm).intermNode = parseContext.intermediate.growAggregate(0, initNode, (yyvsp[-1].lex).loc);
}
-#line 5417 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5431 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 128:
-#line 1068 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1123 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).type = (yyvsp[-3].interm.type);
TIntermNode* initNode = parseContext.declareVariable((yyvsp[-2].lex).loc, *(yyvsp[-2].lex).string, (yyvsp[-3].interm.type), 0, (yyvsp[0].interm.intermTypedNode));
(yyval.interm).intermNode = parseContext.intermediate.growAggregate(0, initNode, (yyvsp[-1].lex).loc);
}
-#line 5427 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5441 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 129:
-#line 1077 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1132 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[0].interm.type);
@@ -5436,14 +5452,13 @@ yyreduce:
parseContext.profileRequires((yyvsp[0].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
parseContext.profileRequires((yyvsp[0].interm.type).loc, EEsProfile, 300, 0, "arrayed type");
}
-
parseContext.precisionQualifierCheck((yyval.interm.type).loc, (yyval.interm.type).basicType, (yyval.interm.type).qualifier);
}
-#line 5443 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5456 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 130:
-#line 1088 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1142 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.globalQualifierFixCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier);
parseContext.globalQualifierTypeCheck((yyvsp[-1].interm.type).loc, (yyvsp[-1].interm.type).qualifier, (yyvsp[0].interm.type));
@@ -5468,22 +5483,22 @@ yyreduce:
(parseContext.language == EShLangFragment && (yyval.interm.type).qualifier.storage == EvqVaryingIn)))
(yyval.interm.type).qualifier.smooth = true;
}
-#line 5472 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5485 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 131:
-#line 1115 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1169 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.globalCheck((yyvsp[0].lex).loc, "invariant");
parseContext.profileRequires((yyval.interm.type).loc, ENoProfile, 120, 0, "invariant");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.invariant = true;
}
-#line 5483 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5496 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 132:
-#line 1124 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1178 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.globalCheck((yyvsp[0].lex).loc, "smooth");
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "smooth");
@@ -5491,11 +5506,11 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.smooth = true;
}
-#line 5495 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5508 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 133:
-#line 1131 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1185 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.globalCheck((yyvsp[0].lex).loc, "flat");
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "flat");
@@ -5503,58 +5518,49 @@ yyreduce:
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.flat = true;
}
-#line 5507 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5520 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 134:
-#line 1138 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1193 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.globalCheck((yyvsp[0].lex).loc, "noperspective");
-#ifdef NV_EXTENSIONS
parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 0, E_GL_NV_shader_noperspective_interpolation, "noperspective");
-#else
- parseContext.requireProfile((yyvsp[0].lex).loc, ~EEsProfile, "noperspective");
-#endif
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "noperspective");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.nopersp = true;
}
-#line 5523 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5532 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 135:
-#line 1149 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1200 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.globalCheck((yyvsp[0].lex).loc, "__explicitInterpAMD");
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation");
parseContext.profileRequires((yyvsp[0].lex).loc, ECompatibilityProfile, 450, E_GL_AMD_shader_explicit_vertex_parameter, "explicit interpolation");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.explicitInterp = true;
-#endif
}
-#line 5537 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5544 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 136:
-#line 1158 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1207 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
parseContext.globalCheck((yyvsp[0].lex).loc, "pervertexNV");
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 0, E_GL_NV_fragment_shader_barycentric, "fragment shader barycentric");
parseContext.profileRequires((yyvsp[0].lex).loc, ECompatibilityProfile, 0, E_GL_NV_fragment_shader_barycentric, "fragment shader barycentric");
parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 0, E_GL_NV_fragment_shader_barycentric, "fragment shader barycentric");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.pervertexNV = true;
-#endif
}
-#line 5552 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5557 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 137:
-#line 1168 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1215 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck((yyvsp[0].lex).loc, "perprimitiveNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangFragmentMask | EShLangMeshNVMask), "perprimitiveNV");
@@ -5563,114 +5569,109 @@ yyreduce:
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_NV_mesh_shader, "perprimitiveNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.perPrimitiveNV = true;
-#endif
}
-#line 5569 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5572 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 138:
-#line 1180 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1225 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck((yyvsp[0].lex).loc, "perviewNV");
parseContext.requireStage((yyvsp[0].lex).loc, EShLangMeshNV, "perviewNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.perViewNV = true;
-#endif
}
-#line 5583 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5584 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 139:
-#line 1189 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1232 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
// No need for profile version or extension check. Shader stage already checks both.
parseContext.globalCheck((yyvsp[0].lex).loc, "taskNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTaskNVMask | EShLangMeshNVMask), "taskNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.perTaskNV = true;
-#endif
}
-#line 5597 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5596 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 140:
-#line 1201 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1243 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[-1].interm.type);
}
-#line 5605 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5604 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 141:
-#line 1207 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1249 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5613 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5612 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 142:
-#line 1210 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1252 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[-2].interm.type);
(yyval.interm.type).shaderQualifiers.merge((yyvsp[0].interm.type).shaderQualifiers);
parseContext.mergeObjectLayoutQualifiers((yyval.interm.type).qualifier, (yyvsp[0].interm.type).qualifier, false);
}
-#line 5623 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5622 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 143:
-#line 1217 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1259 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.setLayoutQualifier((yyvsp[0].lex).loc, (yyval.interm.type), *(yyvsp[0].lex).string);
}
-#line 5632 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5631 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 144:
-#line 1221 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1263 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[-2].lex).loc);
parseContext.setLayoutQualifier((yyvsp[-2].lex).loc, (yyval.interm.type), *(yyvsp[-2].lex).string, (yyvsp[0].interm.intermTypedNode));
}
-#line 5641 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5640 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 145:
-#line 1225 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1267 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ // because "shared" is both an identifier and a keyword
(yyval.interm.type).init((yyvsp[0].lex).loc);
TString strShared("shared");
parseContext.setLayoutQualifier((yyvsp[0].lex).loc, (yyval.interm.type), strShared);
}
-#line 5651 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5650 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
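      // "shared" reaches this rule as an identifier inside layout(...), distinct
      // from the storage keyword; illustrative GLSL uses:
      //     layout(shared) uniform Block { vec4 v; };  // block packing layout
      //     shared float scratch[64];                  // compute-stage storage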
case 146:
-#line 1233 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1276 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.profileRequires((yyval.interm.type).loc, ECoreProfile | ECompatibilityProfile, 400, E_GL_ARB_gpu_shader5, "precise");
parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 320, Num_AEP_gpu_shader5, AEP_gpu_shader5, "precise");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.noContraction = true;
}
-#line 5662 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5661 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 147:
-#line 1242 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1286 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5670 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 148:
-#line 1245 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1289 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[-1].interm.type);
if ((yyval.interm.type).basicType == EbtVoid)
@@ -5679,446 +5680,432 @@ yyreduce:
(yyval.interm.type).shaderQualifiers.merge((yyvsp[0].interm.type).shaderQualifiers);
parseContext.mergeQualifiers((yyval.interm.type).loc, (yyval.interm.type).qualifier, (yyvsp[0].interm.type).qualifier, false);
}
-#line 5683 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5682 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 149:
-#line 1256 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1300 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5691 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5690 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 150:
-#line 1259 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1303 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5699 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5698 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 151:
-#line 1262 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1306 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.checkPrecisionQualifier((yyvsp[0].interm.type).loc, (yyvsp[0].interm.type).qualifier.precision);
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5708 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5707 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 152:
-#line 1266 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1310 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// allow inheritance of storage qualifier from block declaration
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5717 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5716 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 153:
-#line 1270 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1314 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// allow inheritance of storage qualifier from block declaration
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5726 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5725 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 154:
-#line 1274 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1319 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// allow inheritance of storage qualifier from block declaration
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5735 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5734 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 155:
-#line 1278 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1323 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[0].interm.type);
}
-#line 5743 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5742 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 156:
-#line 1284 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1330 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqConst; // will later turn into EvqConstReadOnly, if the initializer is not constant
}
-#line 5752 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5751 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 157:
-#line 1288 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1334 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.requireStage((yyvsp[0].lex).loc, EShLangVertex, "attribute");
- parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "attribute");
- parseContext.checkDeprecated((yyvsp[0].lex).loc, ENoProfile, 130, "attribute");
- parseContext.requireNotRemoved((yyvsp[0].lex).loc, ECoreProfile, 420, "attribute");
- parseContext.requireNotRemoved((yyvsp[0].lex).loc, EEsProfile, 300, "attribute");
-
- parseContext.globalCheck((yyvsp[0].lex).loc, "attribute");
-
+ parseContext.globalCheck((yyvsp[0].lex).loc, "inout");
(yyval.interm.type).init((yyvsp[0].lex).loc);
- (yyval.interm.type).qualifier.storage = EvqVaryingIn;
+ (yyval.interm.type).qualifier.storage = EvqInOut;
}
-#line 5769 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5761 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 158:
-#line 1300 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1339 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.checkDeprecated((yyvsp[0].lex).loc, ENoProfile, 130, "varying");
- parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "varying");
- parseContext.requireNotRemoved((yyvsp[0].lex).loc, ECoreProfile, 420, "varying");
- parseContext.requireNotRemoved((yyvsp[0].lex).loc, EEsProfile, 300, "varying");
-
- parseContext.globalCheck((yyvsp[0].lex).loc, "varying");
-
+ parseContext.globalCheck((yyvsp[0].lex).loc, "in");
(yyval.interm.type).init((yyvsp[0].lex).loc);
- if (parseContext.language == EShLangVertex)
- (yyval.interm.type).qualifier.storage = EvqVaryingOut;
- else
- (yyval.interm.type).qualifier.storage = EvqVaryingIn;
+ // whether this is a parameter "in" or a pipeline "in" will get sorted out a bit later
+ (yyval.interm.type).qualifier.storage = EvqIn;
}
-#line 5788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5772 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
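      // Both uses below reduce through this "in" rule; as the comment above notes,
      // the parse context later distinguishes pipeline inputs from call-direction
      // parameter qualifiers (declarations are illustrative):
      //     in vec2 uv;              // pipeline input at global scope
      //     float f(in float x);     // parameter qualifier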
case 159:
-#line 1314 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1345 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.globalCheck((yyvsp[0].lex).loc, "inout");
+ parseContext.globalCheck((yyvsp[0].lex).loc, "out");
(yyval.interm.type).init((yyvsp[0].lex).loc);
- (yyval.interm.type).qualifier.storage = EvqInOut;
+ // whether this is a parameter "out" or a pipeline "out" will get sorted out a bit later
+ (yyval.interm.type).qualifier.storage = EvqOut;
}
-#line 5798 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5783 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 160:
-#line 1319 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1351 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.globalCheck((yyvsp[0].lex).loc, "in");
+ parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 120, 0, "centroid");
+ parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 300, 0, "centroid");
+ parseContext.globalCheck((yyvsp[0].lex).loc, "centroid");
(yyval.interm.type).init((yyvsp[0].lex).loc);
- // whether this is a parameter "in" or a pipeline "in" will get sorted out a bit later
- (yyval.interm.type).qualifier.storage = EvqIn;
+ (yyval.interm.type).qualifier.centroid = true;
}
-#line 5809 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5795 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 161:
-#line 1325 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1358 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.globalCheck((yyvsp[0].lex).loc, "out");
+ parseContext.globalCheck((yyvsp[0].lex).loc, "uniform");
(yyval.interm.type).init((yyvsp[0].lex).loc);
- // whether this is a parameter "out" or a pipeline "out" will get sorted out a bit later
- (yyval.interm.type).qualifier.storage = EvqOut;
+ (yyval.interm.type).qualifier.storage = EvqUniform;
}
-#line 5820 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5805 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 162:
-#line 1331 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1363 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 120, 0, "centroid");
- parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 300, 0, "centroid");
- parseContext.globalCheck((yyvsp[0].lex).loc, "centroid");
+ parseContext.globalCheck((yyvsp[0].lex).loc, "shared");
+ parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared");
+ parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 310, 0, "shared");
+ parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangComputeMask | EShLangMeshNVMask | EShLangTaskNVMask), "shared");
(yyval.interm.type).init((yyvsp[0].lex).loc);
- (yyval.interm.type).qualifier.centroid = true;
+ (yyval.interm.type).qualifier.storage = EvqShared;
}
-#line 5832 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5818 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 163:
-#line 1338 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1371 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.globalCheck((yyvsp[0].lex).loc, "patch");
- parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTessControlMask | EShLangTessEvaluationMask), "patch");
+ parseContext.globalCheck((yyvsp[0].lex).loc, "buffer");
(yyval.interm.type).init((yyvsp[0].lex).loc);
- (yyval.interm.type).qualifier.patch = true;
+ (yyval.interm.type).qualifier.storage = EvqBuffer;
}
-#line 5843 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5828 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 164:
-#line 1344 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1377 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.globalCheck((yyvsp[0].lex).loc, "sample");
+ parseContext.requireStage((yyvsp[0].lex).loc, EShLangVertex, "attribute");
+ parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "attribute");
+ parseContext.checkDeprecated((yyvsp[0].lex).loc, ENoProfile, 130, "attribute");
+ parseContext.requireNotRemoved((yyvsp[0].lex).loc, ECoreProfile, 420, "attribute");
+ parseContext.requireNotRemoved((yyvsp[0].lex).loc, EEsProfile, 300, "attribute");
+
+ parseContext.globalCheck((yyvsp[0].lex).loc, "attribute");
+
(yyval.interm.type).init((yyvsp[0].lex).loc);
- (yyval.interm.type).qualifier.sample = true;
+ (yyval.interm.type).qualifier.storage = EvqVaryingIn;
}
-#line 5853 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5845 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 165:
-#line 1349 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1389 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.globalCheck((yyvsp[0].lex).loc, "uniform");
+ parseContext.checkDeprecated((yyvsp[0].lex).loc, ENoProfile, 130, "varying");
+ parseContext.checkDeprecated((yyvsp[0].lex).loc, ECoreProfile, 130, "varying");
+ parseContext.requireNotRemoved((yyvsp[0].lex).loc, ECoreProfile, 420, "varying");
+ parseContext.requireNotRemoved((yyvsp[0].lex).loc, EEsProfile, 300, "varying");
+
+ parseContext.globalCheck((yyvsp[0].lex).loc, "varying");
+
(yyval.interm.type).init((yyvsp[0].lex).loc);
- (yyval.interm.type).qualifier.storage = EvqUniform;
+ if (parseContext.language == EShLangVertex)
+ (yyval.interm.type).qualifier.storage = EvqVaryingOut;
+ else
+ (yyval.interm.type).qualifier.storage = EvqVaryingIn;
}
-#line 5863 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5864 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
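      // Legacy GLSL exercised by the deprecated rules above (removed in core 4.20
      // and ES 3.00, per the requireNotRemoved checks), with illustrative declarations:
      //     attribute vec4 position;   // vertex-stage input (EvqVaryingIn)
      //     varying vec2 uv;           // vertex output / fragment input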
case 166:
-#line 1354 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1403 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.globalCheck((yyvsp[0].lex).loc, "buffer");
+ parseContext.globalCheck((yyvsp[0].lex).loc, "patch");
+ parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangTessControlMask | EShLangTessEvaluationMask), "patch");
(yyval.interm.type).init((yyvsp[0].lex).loc);
- (yyval.interm.type).qualifier.storage = EvqBuffer;
+ (yyval.interm.type).qualifier.patch = true;
}
-#line 5873 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5875 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 167:
-#line 1359 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1409 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ {
+ parseContext.globalCheck((yyvsp[0].lex).loc, "sample");
+ (yyval.interm.type).init((yyvsp[0].lex).loc);
+ (yyval.interm.type).qualifier.sample = true;
+ }
+#line 5885 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+ break;
+
+ case 168:
+#line 1414 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
parseContext.globalCheck((yyvsp[0].lex).loc, "hitAttributeNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangIntersectNVMask | EShLangClosestHitNVMask
| EShLangAnyHitNVMask), "hitAttributeNV");
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "hitAttributeNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqHitAttrNV;
-#endif
}
-#line 5888 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5898 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 168:
-#line 1369 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 169:
+#line 1422 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenNVMask | EShLangClosestHitNVMask |
EShLangAnyHitNVMask | EShLangMissNVMask), "rayPayloadNV");
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqPayloadNV;
-#endif
}
-#line 5903 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5911 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 169:
-#line 1379 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 170:
+#line 1430 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
parseContext.globalCheck((yyvsp[0].lex).loc, "rayPayloadInNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangClosestHitNVMask |
EShLangAnyHitNVMask | EShLangMissNVMask), "rayPayloadInNV");
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "rayPayloadInNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqPayloadInNV;
-#endif
}
-#line 5918 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5924 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 170:
-#line 1389 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 171:
+#line 1438 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangRayGenNVMask |
EShLangClosestHitNVMask | EShLangMissNVMask | EShLangCallableNVMask), "callableDataNV");
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqCallableDataNV;
-#endif
}
-#line 5933 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5937 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 171:
-#line 1399 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 172:
+#line 1446 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
parseContext.globalCheck((yyvsp[0].lex).loc, "callableDataInNV");
parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangCallableNVMask), "callableDataInNV");
parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile, 460, E_GL_NV_ray_tracing, "callableDataInNV");
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.storage = EvqCallableDataInNV;
-#endif
- }
-#line 5947 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
- break;
-
- case 172:
-#line 1408 "MachineIndependent/glslang.y" /* yacc.c:1646 */
- {
- parseContext.globalCheck((yyvsp[0].lex).loc, "shared");
- parseContext.profileRequires((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, 430, E_GL_ARB_compute_shader, "shared");
- parseContext.profileRequires((yyvsp[0].lex).loc, EEsProfile, 310, 0, "shared");
-#ifdef NV_EXTENSIONS
- parseContext.requireStage((yyvsp[0].lex).loc, (EShLanguageMask)(EShLangComputeMask | EShLangMeshNVMask | EShLangTaskNVMask), "shared");
-#else
- parseContext.requireStage((yyvsp[0].lex).loc, EShLangCompute, "shared");
-#endif
- (yyval.interm.type).init((yyvsp[0].lex).loc);
- (yyval.interm.type).qualifier.storage = EvqShared;
}
-#line 5964 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5949 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 173:
-#line 1420 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1453 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.coherent = true;
}
-#line 5973 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5958 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 174:
-#line 1424 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1457 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "devicecoherent");
(yyval.interm.type).qualifier.devicecoherent = true;
}
-#line 5983 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5968 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 175:
-#line 1429 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1462 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "queuefamilycoherent");
(yyval.interm.type).qualifier.queuefamilycoherent = true;
}
-#line 5993 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5978 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 176:
-#line 1434 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1467 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "workgroupcoherent");
(yyval.interm.type).qualifier.workgroupcoherent = true;
}
-#line 6003 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5988 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 177:
-#line 1439 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1472 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "subgroupcoherent");
(yyval.interm.type).qualifier.subgroupcoherent = true;
}
-#line 6013 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 5998 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 178:
-#line 1444 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1477 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
parseContext.requireExtensions((yyvsp[0].lex).loc, 1, &E_GL_KHR_memory_scope_semantics, "nonprivate");
(yyval.interm.type).qualifier.nonprivate = true;
}
-#line 6023 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6008 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 179:
-#line 1449 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1482 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.volatil = true;
}
-#line 6032 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6017 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 180:
-#line 1453 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1486 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.restrict = true;
}
-#line 6041 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6026 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 181:
-#line 1457 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1490 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.readonly = true;
}
-#line 6050 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6035 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 182:
-#line 1461 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1494 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.writeonly = true;
}
-#line 6059 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6044 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 183:
-#line 1465 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1498 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.spvRemoved((yyvsp[0].lex).loc, "subroutine");
parseContext.globalCheck((yyvsp[0].lex).loc, "subroutine");
parseContext.unimplemented((yyvsp[0].lex).loc, "subroutine");
(yyval.interm.type).init((yyvsp[0].lex).loc);
}
-#line 6070 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6055 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 184:
-#line 1471 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1504 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.spvRemoved((yyvsp[-3].lex).loc, "subroutine");
parseContext.globalCheck((yyvsp[-3].lex).loc, "subroutine");
parseContext.unimplemented((yyvsp[-3].lex).loc, "subroutine");
(yyval.interm.type).init((yyvsp[-3].lex).loc);
}
-#line 6081 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6066 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 185:
-#line 1480 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1515 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc);
(yyval.interm.type).qualifier.nonUniform = true;
}
-#line 6090 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6075 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 186:
-#line 1487 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1522 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// TODO
}
-#line 6098 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6083 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 187:
-#line 1490 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1525 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// TODO: 4.0 semantics: subroutines
// 1) make sure each identifier is a type declared earlier with SUBROUTINE
// 2) save all of the identifiers for future comparison with the declared function
}
-#line 6108 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6093 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 188:
-#line 1498 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1534 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[-1].interm.type);
(yyval.interm.type).qualifier.precision = parseContext.getDefaultPrecision((yyval.interm.type));
(yyval.interm.type).typeParameters = (yyvsp[0].interm.typeParameters);
}
-#line 6118 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6103 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 189:
-#line 1503 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1539 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.arrayOfArrayVersionCheck((yyvsp[0].interm).loc, (yyvsp[0].interm).arraySizes);
(yyval.interm.type) = (yyvsp[-2].interm.type);
@@ -6126,21 +6113,21 @@ yyreduce:
(yyval.interm.type).typeParameters = (yyvsp[-1].interm.typeParameters);
(yyval.interm.type).arraySizes = (yyvsp[0].interm).arraySizes;
}
-#line 6130 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6115 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 190:
-#line 1513 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1549 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).loc = (yyvsp[-1].lex).loc;
(yyval.interm).arraySizes = new TArraySizes;
(yyval.interm).arraySizes->addInnerSize();
}
-#line 6140 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6125 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 191:
-#line 1518 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1554 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm).loc = (yyvsp[-2].lex).loc;
(yyval.interm).arraySizes = new TArraySizes;
@@ -6149,20 +6136,20 @@ yyreduce:
parseContext.arraySizeCheck((yyvsp[-1].interm.intermTypedNode)->getLoc(), (yyvsp[-1].interm.intermTypedNode), size, "array size");
(yyval.interm).arraySizes->addInnerSize(size);
}
-#line 6153 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6138 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 192:
-#line 1526 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1562 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[-2].interm);
(yyval.interm).arraySizes->addInnerSize();
}
-#line 6162 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6147 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 193:
-#line 1530 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1566 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm) = (yyvsp[-3].interm);
@@ -6170,35 +6157,35 @@ yyreduce:
parseContext.arraySizeCheck((yyvsp[-1].interm.intermTypedNode)->getLoc(), (yyvsp[-1].interm.intermTypedNode), size, "array size");
(yyval.interm).arraySizes->addInnerSize(size);
}
-#line 6174 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6159 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 194:
-#line 1540 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1576 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeParameters) = (yyvsp[0].interm.typeParameters);
}
-#line 6182 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6167 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 195:
-#line 1543 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1579 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeParameters) = 0;
}
-#line 6190 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6175 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 196:
-#line 1549 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1585 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeParameters) = (yyvsp[-1].interm.typeParameters);
}
-#line 6198 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6183 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 197:
-#line 1555 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1591 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeParameters) = new TArraySizes;
@@ -6206,11 +6193,11 @@ yyreduce:
parseContext.arraySizeCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode), size, "type parameter");
(yyval.interm.typeParameters)->addInnerSize(size);
}
-#line 6210 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6195 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 198:
-#line 1562 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1598 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeParameters) = (yyvsp[-2].interm.typeParameters);
@@ -6218,1985 +6205,1974 @@ yyreduce:
parseContext.arraySizeCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode), size, "type parameter");
(yyval.interm.typeParameters)->addInnerSize(size);
}
-#line 6222 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6207 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 199:
-#line 1572 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1608 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtVoid;
}
-#line 6231 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6216 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 200:
-#line 1576 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1612 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
}
-#line 6240 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6225 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 201:
-#line 1580 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1616 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtDouble;
+ (yyval.interm.type).basicType = EbtInt;
}
-#line 6250 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6234 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 202:
-#line 1585 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1620 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "float16_t", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat16;
+ (yyval.interm.type).basicType = EbtUint;
}
-#line 6260 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6244 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 203:
-#line 1590 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1625 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).basicType = EbtBool;
}
-#line 6270 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6253 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 204:
-#line 1595 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1629 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtDouble;
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setVector(2);
}
-#line 6280 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6263 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 205:
-#line 1600 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1634 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt;
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setVector(3);
}
-#line 6289 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6273 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 206:
-#line 1604 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1639 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setVector(4);
}
-#line 6299 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6283 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 207:
-#line 1609 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1644 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt8;
+ (yyval.interm.type).basicType = EbtBool;
+ (yyval.interm.type).setVector(2);
}
-#line 6309 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6293 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 208:
-#line 1614 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1649 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint8;
+ (yyval.interm.type).basicType = EbtBool;
+ (yyval.interm.type).setVector(3);
}
-#line 6319 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6303 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 209:
-#line 1619 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1654 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt16;
+ (yyval.interm.type).basicType = EbtBool;
+ (yyval.interm.type).setVector(4);
}
-#line 6329 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6313 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 210:
-#line 1624 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1659 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint16;
+ (yyval.interm.type).basicType = EbtInt;
+ (yyval.interm.type).setVector(2);
}
-#line 6339 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6323 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 211:
-#line 1629 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1664 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
+ (yyval.interm.type).setVector(3);
}
-#line 6349 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6333 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 212:
-#line 1634 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1669 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).basicType = EbtInt;
+ (yyval.interm.type).setVector(4);
}
-#line 6359 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6343 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 213:
-#line 1639 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1674 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt64;
+ (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).setVector(2);
}
-#line 6369 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6354 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 214:
-#line 1644 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1680 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint64;
+ (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).setVector(3);
}
-#line 6379 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6365 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 215:
-#line 1649 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1686 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtBool;
+ (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).setVector(4);
}
-#line 6388 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6376 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 216:
-#line 1653 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1692 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setVector(2);
+ (yyval.interm.type).setMatrix(2, 2);
}
-#line 6398 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6386 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 217:
-#line 1658 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1697 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setVector(3);
+ (yyval.interm.type).setMatrix(3, 3);
}
-#line 6408 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6396 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 218:
-#line 1663 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1702 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setVector(4);
+ (yyval.interm.type).setMatrix(4, 4);
}
-#line 6418 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6406 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 219:
-#line 1668 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1707 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtDouble;
- (yyval.interm.type).setVector(2);
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setMatrix(2, 2);
}
-#line 6429 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6416 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 220:
-#line 1674 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1712 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtDouble;
- (yyval.interm.type).setVector(3);
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setMatrix(2, 3);
}
-#line 6440 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6426 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 221:
-#line 1680 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1717 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtDouble;
- (yyval.interm.type).setVector(4);
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setMatrix(2, 4);
}
-#line 6451 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6436 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 222:
-#line 1686 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1722 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat16;
- (yyval.interm.type).setVector(2);
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setMatrix(3, 2);
}
-#line 6462 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6446 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 223:
-#line 1692 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1727 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat16;
- (yyval.interm.type).setVector(3);
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setMatrix(3, 3);
}
-#line 6473 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6456 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 224:
-#line 1698 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1732 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat16;
- (yyval.interm.type).setVector(4);
+ (yyval.interm.type).basicType = EbtFloat;
+ (yyval.interm.type).setMatrix(3, 4);
}
-#line 6484 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6466 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 225:
-#line 1704 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1737 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setVector(2);
+ (yyval.interm.type).setMatrix(4, 2);
}
-#line 6495 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6476 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 226:
-#line 1710 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1742 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setVector(3);
+ (yyval.interm.type).setMatrix(4, 3);
}
-#line 6506 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6486 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 227:
-#line 1716 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1747 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setVector(4);
+ (yyval.interm.type).setMatrix(4, 4);
}
-#line 6517 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6496 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 228:
-#line 1722 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1753 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
- (yyval.interm.type).setVector(2);
}
-#line 6528 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6506 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 229:
-#line 1728 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1758 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "float16_t", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtDouble;
- (yyval.interm.type).setVector(3);
+ (yyval.interm.type).basicType = EbtFloat16;
}
-#line 6539 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6516 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 230:
-#line 1734 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1763 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtDouble;
- (yyval.interm.type).setVector(4);
+ (yyval.interm.type).basicType = EbtFloat;
}
-#line 6550 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6526 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 231:
-#line 1740 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1768 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtBool;
- (yyval.interm.type).setVector(2);
+ (yyval.interm.type).basicType = EbtDouble;
}
-#line 6560 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6536 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 232:
-#line 1745 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1773 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtBool;
- (yyval.interm.type).setVector(3);
+ (yyval.interm.type).basicType = EbtInt8;
}
-#line 6570 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6546 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 233:
-#line 1750 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1778 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtBool;
- (yyval.interm.type).setVector(4);
+ (yyval.interm.type).basicType = EbtUint8;
}
-#line 6580 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6556 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 234:
-#line 1755 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1783 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt;
- (yyval.interm.type).setVector(2);
+ (yyval.interm.type).basicType = EbtInt16;
}
-#line 6590 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6566 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 235:
-#line 1760 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1788 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt;
- (yyval.interm.type).setVector(3);
+ (yyval.interm.type).basicType = EbtUint16;
}
-#line 6600 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6576 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 236:
-#line 1765 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1793 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtInt;
- (yyval.interm.type).setVector(4);
}
-#line 6610 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6586 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 237:
-#line 1770 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1798 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt8;
- (yyval.interm.type).setVector(2);
+ (yyval.interm.type).basicType = EbtUint;
}
-#line 6621 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6596 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 238:
-#line 1776 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1803 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt8;
- (yyval.interm.type).setVector(3);
+ (yyval.interm.type).basicType = EbtInt64;
}
-#line 6632 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6606 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 239:
-#line 1782 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1808 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt8;
- (yyval.interm.type).setVector(4);
+ (yyval.interm.type).basicType = EbtUint64;
}
-#line 6643 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6616 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 240:
-#line 1788 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1813 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt16;
+ (yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(2);
}
-#line 6654 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6627 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 241:
-#line 1794 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1819 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt16;
+ (yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(3);
}
-#line 6665 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6638 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 242:
-#line 1800 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1825 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double vector");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double vector");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt16;
+ (yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(4);
}
-#line 6676 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6649 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 243:
-#line 1806 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1831 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt;
+ (yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setVector(2);
}
-#line 6687 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6660 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 244:
-#line 1812 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1837 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt;
+ (yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setVector(3);
}
-#line 6698 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6671 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 245:
-#line 1818 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1843 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.float16ScalarVectorCheck((yyvsp[0].lex).loc, "half float vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt;
+ (yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setVector(4);
}
-#line 6709 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6682 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 246:
-#line 1824 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1849 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt64;
+ (yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(2);
}
-#line 6720 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6693 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 247:
-#line 1830 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1855 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt64;
+ (yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(3);
}
-#line 6731 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6704 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 248:
-#line 1836 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1861 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtInt64;
+ (yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setVector(4);
}
-#line 6742 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6715 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 249:
-#line 1842 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1867 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
+ parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(2);
}
-#line 6753 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6726 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 250:
-#line 1848 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1873 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
+ parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(3);
}
-#line 6764 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6737 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 251:
-#line 1854 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1879 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.fullIntegerCheck((yyvsp[0].lex).loc, "unsigned integer vector");
+ parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setVector(4);
}
-#line 6775 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6748 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 252:
-#line 1860 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1885 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint8;
+ (yyval.interm.type).basicType = EbtInt8;
(yyval.interm.type).setVector(2);
}
-#line 6786 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6759 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 253:
-#line 1866 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1891 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint8;
+ (yyval.interm.type).basicType = EbtInt8;
(yyval.interm.type).setVector(3);
}
-#line 6797 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6770 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 254:
-#line 1872 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1897 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint8;
+ (yyval.interm.type).basicType = EbtInt8;
(yyval.interm.type).setVector(4);
}
-#line 6808 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6781 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 255:
-#line 1878 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1903 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint16;
+ (yyval.interm.type).basicType = EbtInt16;
(yyval.interm.type).setVector(2);
}
-#line 6819 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6792 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 256:
-#line 1884 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1909 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint16;
+ (yyval.interm.type).basicType = EbtInt16;
(yyval.interm.type).setVector(3);
}
-#line 6830 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6803 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 257:
-#line 1890 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1915 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint16;
+ (yyval.interm.type).basicType = EbtInt16;
(yyval.interm.type).setVector(4);
}
-#line 6841 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6814 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 258:
-#line 1896 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1921 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(2);
}
-#line 6852 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6825 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 259:
-#line 1902 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1927 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(3);
}
-#line 6863 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6836 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 260:
-#line 1908 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1933 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit signed integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).basicType = EbtInt;
(yyval.interm.type).setVector(4);
}
-#line 6874 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6847 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 261:
-#line 1914 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1939 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint64;
+ (yyval.interm.type).basicType = EbtInt64;
(yyval.interm.type).setVector(2);
}
-#line 6885 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6858 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 262:
-#line 1920 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1945 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint64;
+ (yyval.interm.type).basicType = EbtInt64;
(yyval.interm.type).setVector(3);
}
-#line 6896 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6869 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 263:
-#line 1926 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1951 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtUint64;
+ (yyval.interm.type).basicType = EbtInt64;
(yyval.interm.type).setVector(4);
}
-#line 6907 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6880 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 264:
-#line 1932 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1957 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(2, 2);
+ (yyval.interm.type).basicType = EbtUint8;
+ (yyval.interm.type).setVector(2);
}
-#line 6917 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6891 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 265:
-#line 1937 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1963 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(3, 3);
+ (yyval.interm.type).basicType = EbtUint8;
+ (yyval.interm.type).setVector(3);
}
-#line 6927 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6902 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 266:
-#line 1942 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1969 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int8ScalarVectorCheck((yyvsp[0].lex).loc, "8-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(4, 4);
+ (yyval.interm.type).basicType = EbtUint8;
+ (yyval.interm.type).setVector(4);
}
-#line 6937 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6913 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 267:
-#line 1947 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1975 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(2, 2);
+ (yyval.interm.type).basicType = EbtUint16;
+ (yyval.interm.type).setVector(2);
}
-#line 6947 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6924 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 268:
-#line 1952 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1981 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(2, 3);
+ (yyval.interm.type).basicType = EbtUint16;
+ (yyval.interm.type).setVector(3);
}
-#line 6957 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6935 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 269:
-#line 1957 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1987 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int16ScalarVectorCheck((yyvsp[0].lex).loc, "16-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(2, 4);
+ (yyval.interm.type).basicType = EbtUint16;
+ (yyval.interm.type).setVector(4);
}
-#line 6967 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 270:
-#line 1962 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1993 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(3, 2);
+ (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).setVector(2);
}
-#line 6977 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6957 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 271:
-#line 1967 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 1999 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(3, 3);
+ (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).setVector(3);
}
-#line 6987 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6968 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 272:
-#line 1972 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2005 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.explicitInt32Check((yyvsp[0].lex).loc, "32-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(3, 4);
+ (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).setVector(4);
}
-#line 6997 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6979 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 273:
-#line 1977 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2011 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(4, 2);
+ (yyval.interm.type).basicType = EbtUint64;
+ (yyval.interm.type).setVector(2);
}
-#line 7007 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 6990 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 274:
-#line 1982 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2017 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(4, 3);
+ (yyval.interm.type).basicType = EbtUint64;
+ (yyval.interm.type).setVector(3);
}
-#line 7017 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7001 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 275:
-#line 1987 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2023 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.int64Check((yyvsp[0].lex).loc, "64-bit unsigned integer vector", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
- (yyval.interm.type).basicType = EbtFloat;
- (yyval.interm.type).setMatrix(4, 4);
+ (yyval.interm.type).basicType = EbtUint64;
+ (yyval.interm.type).setVector(4);
}
-#line 7027 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7012 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 276:
-#line 1992 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2029 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7038 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7023 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 277:
-#line 1998 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2035 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7049 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7034 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 278:
-#line 2004 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2041 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7060 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7045 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 279:
-#line 2010 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2047 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7071 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7056 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 280:
-#line 2016 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2053 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 7082 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7067 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 281:
-#line 2022 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2059 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 7093 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7078 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 282:
-#line 2028 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2065 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 7104 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7089 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 283:
-#line 2034 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2071 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7115 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7100 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 284:
-#line 2040 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2077 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 7126 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7111 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 285:
-#line 2046 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2083 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 7137 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7122 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 286:
-#line 2052 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2089 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 7148 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7133 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 287:
-#line 2058 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2095 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
- parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
+ parseContext.requireProfile((yyvsp[0].lex).loc, ECoreProfile | ECompatibilityProfile, "double matrix");
+ if (! parseContext.symbolTable.atBuiltInLevel())
+ parseContext.doubleCheck((yyvsp[0].lex).loc, "double matrix");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7159 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7144 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 288:
-#line 2064 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2101 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7170 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7155 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 289:
-#line 2070 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2107 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7181 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7166 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 290:
-#line 2076 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2113 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7192 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7177 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 291:
-#line 2082 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2119 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7203 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7188 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 292:
-#line 2088 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2125 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 7214 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7199 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 293:
-#line 2094 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2131 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 7225 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7210 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 294:
-#line 2100 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2137 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 7236 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7221 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 295:
-#line 2106 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2143 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7247 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7232 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 296:
-#line 2112 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2149 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 7258 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7243 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 297:
-#line 2118 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2155 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 7269 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7254 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 298:
-#line 2124 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2161 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 7280 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7265 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 299:
-#line 2130 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2167 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.float16Check((yyvsp[0].lex).loc, "half float matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat16;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7291 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7276 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 300:
-#line 2136 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2173 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7302 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7287 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 301:
-#line 2142 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2179 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7313 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7298 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 302:
-#line 2148 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2185 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7324 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7309 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 303:
-#line 2154 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2191 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7335 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7320 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 304:
-#line 2160 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2197 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 7346 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7331 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 305:
-#line 2166 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2203 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 7357 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7342 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 306:
-#line 2172 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2209 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 7368 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7353 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 307:
-#line 2178 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2215 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7379 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7364 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 308:
-#line 2184 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2221 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 7390 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7375 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 309:
-#line 2190 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2227 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 7401 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7386 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 310:
-#line 2196 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2233 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 7412 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7397 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 311:
-#line 2202 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2239 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat32Check((yyvsp[0].lex).loc, "float32_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7423 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7408 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 312:
-#line 2208 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2245 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7434 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7419 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 313:
-#line 2214 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2251 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7445 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7430 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 314:
-#line 2220 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2257 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7456 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7441 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 315:
-#line 2226 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2263 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 2);
}
-#line 7467 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7452 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 316:
-#line 2232 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2269 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 3);
}
-#line 7478 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7463 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 317:
-#line 2238 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2275 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(2, 4);
}
-#line 7489 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7474 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 318:
-#line 2244 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2281 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 2);
}
-#line 7500 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7485 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 319:
-#line 2250 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2287 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 3);
}
-#line 7511 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7496 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 320:
-#line 2256 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2293 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(3, 4);
}
-#line 7522 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7507 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 321:
-#line 2262 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2299 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 2);
}
-#line 7533 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7518 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 322:
-#line 2268 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2305 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 3);
}
-#line 7544 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7529 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 323:
-#line 2274 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2311 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.explicitFloat64Check((yyvsp[0].lex).loc, "float64_t matrix", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtDouble;
(yyval.interm.type).setMatrix(4, 4);
}
-#line 7555 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7540 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 324:
-#line 2280 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2317 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef NV_EXTENSIONS
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtAccStructNV;
-#endif
}
-#line 7566 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7549 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 325:
-#line 2286 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2321 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.vulkanRemoved((yyvsp[0].lex).loc, "atomic counter types");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtAtomicUint;
}
-#line 7576 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7559 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 326:
-#line 2291 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2326 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd1D);
}
-#line 7586 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7569 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 327:
-#line 2296 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2332 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D);
}
-#line 7596 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7579 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 328:
-#line 2301 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2337 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd3D);
}
-#line 7606 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7589 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 329:
-#line 2306 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2342 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdCube);
}
-#line 7616 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7599 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 330:
-#line 2311 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2347 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, Esd1D, false, true);
+ (yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, true);
}
-#line 7626 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7609 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 331:
-#line 2316 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2352 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, true);
+ (yyval.interm.type).sampler.set(EbtFloat, EsdCube, false, true);
}
-#line 7636 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7619 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 332:
-#line 2321 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2357 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, EsdCube, false, true);
+ (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true);
}
-#line 7646 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7629 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 333:
-#line 2326 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2362 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, Esd1D, true);
+ (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, true);
}
-#line 7656 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7639 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 334:
-#line 2331 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2368 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true);
+ (yyval.interm.type).sampler.set(EbtFloat, Esd1D, false, true);
}
-#line 7666 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7649 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 335:
-#line 2336 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2373 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, Esd1D, true, true);
+ (yyval.interm.type).sampler.set(EbtFloat, Esd1D, true);
}
-#line 7676 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7659 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 336:
-#line 2341 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2378 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, true);
+ (yyval.interm.type).sampler.set(EbtFloat, Esd1D, true, true);
}
-#line 7686 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 337:
-#line 2346 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2383 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdCube, true);
}
-#line 7696 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7679 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 338:
-#line 2351 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2388 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, EsdCube, true, true);
}
-#line 7706 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7689 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 339:
-#line 2356 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2393 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd1D);
-#endif
}
-#line 7719 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7700 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 340:
-#line 2364 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2399 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D);
-#endif
}
-#line 7732 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7711 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 341:
-#line 2372 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2405 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd3D);
-#endif
}
-#line 7745 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7722 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 342:
-#line 2380 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2411 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdCube);
-#endif
}
-#line 7758 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7733 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 343:
-#line 2388 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2417 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd1D, false, true);
-#endif
}
-#line 7771 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7744 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 344:
-#line 2396 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2423 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D, false, true);
-#endif
}
-#line 7784 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7755 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 345:
-#line 2404 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2429 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdCube, false, true);
-#endif
}
-#line 7797 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7766 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 346:
-#line 2412 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2435 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd1D, true);
-#endif
}
-#line 7810 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7777 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 347:
-#line 2420 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2441 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true);
-#endif
}
-#line 7823 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 348:
-#line 2428 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2447 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd1D, true, true);
-#endif
}
-#line 7836 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7799 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 349:
-#line 2436 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2453 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true, true);
-#endif
}
-#line 7849 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7810 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 350:
-#line 2444 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2459 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdCube, true);
-#endif
}
-#line 7862 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7821 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 351:
-#line 2452 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2465 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat16, EsdCube, true, true);
-#endif
}
-#line 7875 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7832 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 352:
-#line 2460 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2471 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd1D);
}
-#line 7885 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7842 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 353:
-#line 2465 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2477 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd2D);
}
-#line 7895 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7852 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 354:
-#line 2470 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2482 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, Esd3D);
}
-#line 7905 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7862 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 355:
-#line 2475 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2487 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtInt, EsdCube);
}
-#line 7915 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7872 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 356:
-#line 2480 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2492 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtInt, Esd1D, true);
+ (yyval.interm.type).sampler.set(EbtInt, Esd2D, true);
}
-#line 7925 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7882 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 357:
-#line 2485 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2497 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtInt, Esd2D, true);
+ (yyval.interm.type).sampler.set(EbtUint, Esd2D);
}
-#line 7935 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7892 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 358:
-#line 2490 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2502 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtInt, EsdCube, true);
+ (yyval.interm.type).sampler.set(EbtUint, Esd3D);
}
-#line 7945 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7902 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 359:
-#line 2495 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2507 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, Esd1D);
+ (yyval.interm.type).sampler.set(EbtUint, EsdCube);
}
-#line 7955 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7912 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 360:
-#line 2500 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2513 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, Esd2D);
+ (yyval.interm.type).sampler.set(EbtInt, Esd1D, true);
}
-#line 7965 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7922 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 361:
-#line 2505 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2518 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, Esd3D);
+ (yyval.interm.type).sampler.set(EbtInt, EsdCube, true);
}
-#line 7975 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7932 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 362:
-#line 2510 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2523 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, EsdCube);
+ (yyval.interm.type).sampler.set(EbtUint, Esd1D);
}
-#line 7985 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7942 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 363:
-#line 2515 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2528 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtUint, Esd1D, true);
}
-#line 7995 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7952 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 364:
-#line 2520 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2533 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, Esd2D, true);
+ (yyval.interm.type).sampler.set(EbtUint, EsdCube, true);
}
-#line 8005 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7962 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 365:
-#line 2525 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2538 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, EsdCube, true);
+ (yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube, true);
}
-#line 8015 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7972 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 366:
-#line 2530 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2543 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, EsdRect);
+ (yyval.interm.type).sampler.setTexture(EbtInt, EsdCube, true);
}
-#line 8025 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7982 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 367:
-#line 2535 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2548 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, EsdRect, false, true);
+ (yyval.interm.type).sampler.setTexture(EbtUint, EsdCube, true);
}
-#line 8035 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 7992 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 368:
-#line 2540 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2554 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat16, EsdRect);
-#endif
+ (yyval.interm.type).sampler.set(EbtUint, Esd2D, true);
}
-#line 8048 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8002 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 369:
-#line 2548 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2559 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat16, EsdRect, false, true);
-#endif
+ (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D);
}
-#line 8061 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8012 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 370:
-#line 2556 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2564 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtInt, EsdRect);
+ (yyval.interm.type).sampler.setTexture(EbtFloat, Esd3D);
}
-#line 8071 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8022 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 371:
-#line 2561 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2569 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, EsdRect);
+ (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true);
}
-#line 8081 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8032 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 372:
-#line 2566 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2574 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, EsdBuffer);
+ (yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube);
}
-#line 8091 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8042 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 373:
-#line 2571 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2579 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat16, EsdBuffer);
-#endif
+ (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D);
}
-#line 8104 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8052 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 374:
-#line 2579 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2584 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtInt, EsdBuffer);
+ (yyval.interm.type).sampler.setTexture(EbtInt, Esd3D);
}
-#line 8114 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8062 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 375:
-#line 2584 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2589 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, EsdBuffer);
+ (yyval.interm.type).sampler.setTexture(EbtInt, EsdCube);
}
-#line 8124 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8072 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 376:
-#line 2589 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2594 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, false, true);
+ (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true);
}
-#line 8134 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8082 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 377:
-#line 2594 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2599 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, false, false, true);
-#endif
+ (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D);
}
-#line 8147 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8092 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 378:
-#line 2602 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2604 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtInt, Esd2D, false, false, true);
+ (yyval.interm.type).sampler.setTexture(EbtUint, Esd3D);
}
-#line 8157 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8102 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 379:
-#line 2607 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2609 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, Esd2D, false, false, true);
+ (yyval.interm.type).sampler.setTexture(EbtUint, EsdCube);
}
-#line 8167 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8112 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 380:
-#line 2612 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2614 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, false, true);
+ (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true);
}
-#line 8177 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8122 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 381:
-#line 2617 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2619 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true, false, true);
-#endif
+ (yyval.interm.type).sampler.setPureSampler(false);
}
-#line 8190 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8132 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 382:
-#line 2625 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2624 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtInt, Esd2D, true, false, true);
+ (yyval.interm.type).sampler.setPureSampler(true);
}
-#line 8200 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8142 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 383:
@@ -8204,9 +8180,9 @@ yyreduce:
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.set(EbtUint, Esd2D, true, false, true);
+ (yyval.interm.type).sampler.set(EbtFloat, EsdRect);
}
-#line 8210 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8152 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 384:
@@ -8214,1106 +8190,1085 @@ yyreduce:
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setPureSampler(false);
+ (yyval.interm.type).sampler.set(EbtFloat, EsdRect, false, true);
}
-#line 8220 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8162 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 385:
#line 2640 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setPureSampler(true);
+ (yyval.interm.type).sampler.set(EbtFloat16, EsdRect);
}
-#line 8230 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8173 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 386:
-#line 2645 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2646 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D);
+ (yyval.interm.type).sampler.set(EbtFloat16, EsdRect, false, true);
}
-#line 8240 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8184 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 387:
-#line 2650 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2652 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd1D);
-#endif
+ (yyval.interm.type).sampler.set(EbtInt, EsdRect);
}
-#line 8253 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8194 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 388:
-#line 2658 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2657 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D);
+ (yyval.interm.type).sampler.set(EbtUint, EsdRect);
}
-#line 8263 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8204 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 389:
-#line 2663 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2662 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D);
-#endif
+ (yyval.interm.type).sampler.set(EbtFloat, EsdBuffer);
}
-#line 8276 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8214 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 390:
-#line 2671 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2667 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat, Esd3D);
+ (yyval.interm.type).sampler.set(EbtFloat16, EsdBuffer);
}
-#line 8286 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8225 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 391:
-#line 2676 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2673 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd3D);
-#endif
+ (yyval.interm.type).sampler.set(EbtInt, EsdBuffer);
}
-#line 8299 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8235 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 392:
-#line 2684 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2678 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube);
+ (yyval.interm.type).sampler.set(EbtUint, EsdBuffer);
}
-#line 8309 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8245 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 393:
-#line 2689 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2683 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat16, EsdCube);
-#endif
+ (yyval.interm.type).sampler.set(EbtFloat, Esd2D, false, false, true);
}
-#line 8322 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8255 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 394:
-#line 2697 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2688 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D, true);
+ (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, false, false, true);
}
-#line 8332 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8266 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 395:
-#line 2702 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2694 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd1D, true);
-#endif
+ (yyval.interm.type).sampler.set(EbtInt, Esd2D, false, false, true);
}
-#line 8345 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8276 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 396:
-#line 2710 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2699 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true);
+ (yyval.interm.type).sampler.set(EbtUint, Esd2D, false, false, true);
}
-#line 8355 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8286 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 397:
-#line 2715 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2704 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, true);
-#endif
+ (yyval.interm.type).sampler.set(EbtFloat, Esd2D, true, false, true);
}
-#line 8368 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8296 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 398:
-#line 2723 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2709 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float sampler", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat, EsdCube, true);
+ (yyval.interm.type).sampler.set(EbtFloat16, Esd2D, true, false, true);
}
-#line 8378 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8307 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 399:
-#line 2728 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2715 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
- parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtFloat16, EsdCube, true);
-#endif
+ (yyval.interm.type).sampler.set(EbtInt, Esd2D, true, false, true);
}
-#line 8391 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8317 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 400:
-#line 2736 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2720 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtInt, Esd1D);
+ (yyval.interm.type).sampler.set(EbtUint, Esd2D, true, false, true);
}
-#line 8401 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8327 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 401:
-#line 2741 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2725 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D);
+ (yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D);
}
-#line 8411 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8337 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 402:
-#line 2746 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2730 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtInt, Esd3D);
+ (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd1D);
}
-#line 8421 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8348 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 403:
-#line 2751 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2736 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtInt, EsdCube);
+ (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D);
}
-#line 8431 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8359 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 404:
-#line 2756 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2742 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtInt, Esd1D, true);
+ (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd3D);
}
-#line 8441 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8370 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 405:
-#line 2761 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2748 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true);
+ (yyval.interm.type).sampler.setTexture(EbtFloat16, EsdCube);
}
-#line 8451 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8381 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 406:
-#line 2766 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2754 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtInt, EsdCube, true);
+ (yyval.interm.type).sampler.setTexture(EbtFloat, Esd1D, true);
}
-#line 8461 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8391 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 407:
-#line 2771 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2759 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtUint, Esd1D);
+ (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd1D, true);
}
-#line 8471 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8402 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 408:
-#line 2776 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2765 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D);
+ (yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, true);
}
-#line 8481 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8413 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 409:
-#line 2781 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2771 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
+ parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtUint, Esd3D);
+ (yyval.interm.type).sampler.setTexture(EbtFloat16, EsdCube, true);
}
-#line 8491 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8424 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 410:
-#line 2786 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2777 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtUint, EsdCube);
+ (yyval.interm.type).sampler.setTexture(EbtInt, Esd1D);
}
-#line 8501 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8434 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 411:
-#line 2791 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2782 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtUint, Esd1D, true);
+ (yyval.interm.type).sampler.setTexture(EbtInt, Esd1D, true);
}
-#line 8511 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8444 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 412:
-#line 2796 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2787 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true);
+ (yyval.interm.type).sampler.setTexture(EbtUint, Esd1D);
}
-#line 8521 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8454 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 413:
-#line 2801 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2792 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
- (yyval.interm.type).sampler.setTexture(EbtUint, EsdCube, true);
+ (yyval.interm.type).sampler.setTexture(EbtUint, Esd1D, true);
}
-#line 8531 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8464 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 414:
-#line 2806 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2797 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, EsdRect);
}
-#line 8541 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8474 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 415:
-#line 2811 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2802 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, EsdRect);
-#endif
}
-#line 8554 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8485 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 416:
-#line 2819 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2808 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, EsdRect);
}
-#line 8564 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8495 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 417:
-#line 2824 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2813 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, EsdRect);
}
-#line 8574 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8505 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 418:
-#line 2829 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2818 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, EsdBuffer);
}
-#line 8584 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8515 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 419:
-#line 2834 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2823 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, EsdBuffer);
-#endif
}
-#line 8597 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8526 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 420:
-#line 2842 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2829 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, EsdBuffer);
}
-#line 8607 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8536 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 421:
-#line 2847 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2834 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, EsdBuffer);
}
-#line 8617 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8546 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 422:
-#line 2852 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2839 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, false, false, true);
}
-#line 8627 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8556 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 423:
-#line 2857 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2844 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, false, false, true);
-#endif
}
-#line 8640 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8567 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 424:
-#line 2865 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2850 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, false, false, true);
}
-#line 8650 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8577 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 425:
-#line 2870 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2855 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, false, false, true);
}
-#line 8660 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8587 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 426:
-#line 2875 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2860 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat, Esd2D, true, false, true);
}
-#line 8670 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8597 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 427:
-#line 2880 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2865 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float texture", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtFloat16, Esd2D, true, false, true);
-#endif
}
-#line 8683 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8608 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 428:
-#line 2888 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2871 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtInt, Esd2D, true, false, true);
}
-#line 8693 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8618 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 429:
-#line 2893 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2876 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setTexture(EbtUint, Esd2D, true, false, true);
}
-#line 8703 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8628 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 430:
-#line 2898 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2881 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd1D);
}
-#line 8713 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8638 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 431:
-#line 2903 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2886 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd1D);
-#endif
}
-#line 8726 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8649 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 432:
-#line 2911 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2892 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd1D);
}
-#line 8736 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8659 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 433:
-#line 2916 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2897 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd1D);
}
-#line 8746 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8669 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 434:
-#line 2921 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2902 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd2D);
}
-#line 8756 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8679 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 435:
-#line 2926 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2907 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D);
-#endif
}
-#line 8769 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8690 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 436:
-#line 2934 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2913 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd2D);
}
-#line 8779 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8700 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 437:
-#line 2939 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2918 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd2D);
}
-#line 8789 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8710 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 438:
-#line 2944 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2923 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd3D);
}
-#line 8799 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8720 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 439:
-#line 2949 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2928 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd3D);
-#endif
}
-#line 8812 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8731 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 440:
-#line 2957 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2934 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd3D);
}
-#line 8822 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8741 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 441:
-#line 2962 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2939 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd3D);
}
-#line 8832 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8751 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 442:
-#line 2967 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2944 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, EsdRect);
}
-#line 8842 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8761 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 443:
-#line 2972 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2949 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, EsdRect);
-#endif
}
-#line 8855 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8772 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 444:
-#line 2980 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2955 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, EsdRect);
}
-#line 8865 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8782 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 445:
-#line 2985 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2960 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, EsdRect);
}
-#line 8875 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8792 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 446:
-#line 2990 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2965 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, EsdCube);
}
-#line 8885 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8802 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 447:
-#line 2995 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2970 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, EsdCube);
-#endif
}
-#line 8898 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8813 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 448:
-#line 3003 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2976 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, EsdCube);
}
-#line 8908 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8823 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 449:
-#line 3008 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2981 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, EsdCube);
}
-#line 8918 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8833 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 450:
-#line 3013 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2986 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, EsdBuffer);
}
-#line 8928 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8843 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 451:
-#line 3018 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2991 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, EsdBuffer);
-#endif
}
-#line 8941 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8854 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 452:
-#line 3026 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 2997 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, EsdBuffer);
}
-#line 8951 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8864 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 453:
-#line 3031 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3002 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, EsdBuffer);
}
-#line 8961 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8874 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 454:
-#line 3036 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3007 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd1D, true);
}
-#line 8971 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8884 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 455:
-#line 3041 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3012 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd1D, true);
-#endif
}
-#line 8984 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8895 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 456:
-#line 3049 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3018 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd1D, true);
}
-#line 8994 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8905 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 457:
-#line 3054 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3023 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd1D, true);
}
-#line 9004 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8915 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 458:
-#line 3059 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3028 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, true);
}
-#line 9014 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8925 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 459:
-#line 3064 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3033 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, true);
-#endif
}
-#line 9027 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8936 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 460:
-#line 3072 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3039 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd2D, true);
}
-#line 9037 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8946 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 461:
-#line 3077 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3044 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd2D, true);
}
-#line 9047 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8956 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 462:
-#line 3082 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3049 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, EsdCube, true);
}
-#line 9057 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8966 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 463:
-#line 3087 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3054 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, EsdCube, true);
-#endif
}
-#line 9070 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8977 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 464:
-#line 3095 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3060 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, EsdCube, true);
}
-#line 9080 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8987 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 465:
-#line 3100 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3065 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, EsdCube, true);
}
-#line 9090 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 8997 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 466:
-#line 3105 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3070 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, false, false, true);
}
-#line 9100 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9007 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 467:
-#line 3110 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3075 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, false, false, true);
-#endif
}
-#line 9113 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9018 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 468:
-#line 3118 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3081 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd2D, false, false, true);
}
-#line 9123 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9028 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 469:
-#line 3123 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3086 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd2D, false, false, true);
}
-#line 9133 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9038 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 470:
-#line 3128 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3091 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat, Esd2D, true, false, true);
}
-#line 9143 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9048 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 471:
-#line 3133 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3096 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float image", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtFloat16, Esd2D, true, false, true);
-#endif
}
-#line 9156 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9059 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 472:
-#line 3141 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3102 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtInt, Esd2D, true, false, true);
}
-#line 9166 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9069 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 473:
-#line 3146 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3107 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setImage(EbtUint, Esd2D, true, false, true);
}
-#line 9176 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9079 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 474:
-#line 3151 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3112 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ // GL_OES_EGL_image_external
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D);
(yyval.interm.type).sampler.external = true;
}
-#line 9187 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9090 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 475:
-#line 3157 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3118 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ // GL_EXT_YUV_target
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.set(EbtFloat, Esd2D);
(yyval.interm.type).sampler.yuv = true;
}
-#line 9198 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9101 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 476:
-#line 3163 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3124 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtFloat);
}
-#line 9209 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9112 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 477:
-#line 3169 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3130 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtFloat, true);
}
-#line 9220 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9123 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 478:
-#line 3175 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3136 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float subpass input", parseContext.symbolTable.atBuiltInLevel());
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtFloat16);
-#endif
}
-#line 9234 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9135 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 479:
-#line 3184 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3143 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
-#ifdef AMD_EXTENSIONS
parseContext.float16OpaqueCheck((yyvsp[0].lex).loc, "half float subpass input", parseContext.symbolTable.atBuiltInLevel());
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtFloat16, true);
-#endif
}
-#line 9248 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9147 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 480:
-#line 3193 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3150 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtInt);
}
-#line 9259 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9158 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 481:
-#line 3199 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3156 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtInt, true);
}
-#line 9270 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9169 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 482:
-#line 3205 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3162 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtUint);
}
-#line 9281 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9180 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 483:
-#line 3211 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3168 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.requireStage((yyvsp[0].lex).loc, EShLangFragment, "subpass input");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtSampler;
(yyval.interm.type).sampler.setSubpass(EbtUint, true);
}
-#line 9292 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9191 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 484:
-#line 3217 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3174 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.fcoopmatCheck((yyvsp[0].lex).loc, "fcoopmatNV", parseContext.symbolTable.atBuiltInLevel());
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
(yyval.interm.type).basicType = EbtFloat;
(yyval.interm.type).coopmat = true;
}
-#line 9303 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9202 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 485:
-#line 3223 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3180 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ {
+ parseContext.intcoopmatCheck((yyvsp[0].lex).loc, "icoopmatNV", parseContext.symbolTable.atBuiltInLevel());
+ (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
+ (yyval.interm.type).basicType = EbtInt;
+ (yyval.interm.type).coopmat = true;
+ }
+#line 9213 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+ break;
+
+ case 486:
+#line 3186 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ {
+ parseContext.intcoopmatCheck((yyvsp[0].lex).loc, "ucoopmatNV", parseContext.symbolTable.atBuiltInLevel());
+ (yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
+ (yyval.interm.type).basicType = EbtUint;
+ (yyval.interm.type).coopmat = true;
+ }
+#line 9224 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+ break;
+
+ case 487:
+#line 3193 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.type) = (yyvsp[0].interm.type);
(yyval.interm.type).qualifier.storage = parseContext.symbolTable.atGlobalLevel() ? EvqGlobal : EvqTemporary;
parseContext.structTypeCheck((yyval.interm.type).loc, (yyval.interm.type));
}
-#line 9313 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9234 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 486:
-#line 3228 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 488:
+#line 3198 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
//
// This is for user defined type names. The lexical phase looked up the
@@ -9327,47 +9282,47 @@ yyreduce:
} else
parseContext.error((yyvsp[0].lex).loc, "expected type name", (yyvsp[0].lex).string->c_str(), "");
}
-#line 9331 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9252 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 487:
-#line 3244 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 489:
+#line 3214 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "highp precision qualifier");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqHigh);
}
-#line 9341 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9262 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 488:
-#line 3249 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 490:
+#line 3219 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "mediump precision qualifier");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqMedium);
}
-#line 9351 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9272 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 489:
-#line 3254 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 491:
+#line 3224 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.profileRequires((yyvsp[0].lex).loc, ENoProfile, 130, 0, "lowp precision qualifier");
(yyval.interm.type).init((yyvsp[0].lex).loc, parseContext.symbolTable.atGlobalLevel());
parseContext.handlePrecisionQualifier((yyvsp[0].lex).loc, (yyval.interm.type).qualifier, EpqLow);
}
-#line 9361 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9282 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 490:
-#line 3262 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 492:
+#line 3232 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ parseContext.nestedStructCheck((yyvsp[-2].lex).loc); }
-#line 9367 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9288 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 491:
-#line 3262 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 493:
+#line 3232 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
TType* structure = new TType((yyvsp[-1].interm.typeList), *(yyvsp[-4].lex).string);
parseContext.structArrayCheck((yyvsp[-4].lex).loc, *structure);
@@ -9379,17 +9334,17 @@ yyreduce:
(yyval.interm.type).userDef = structure;
--parseContext.structNestingLevel;
}
-#line 9383 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9304 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 492:
-#line 3273 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 494:
+#line 3243 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ parseContext.nestedStructCheck((yyvsp[-1].lex).loc); }
-#line 9389 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9310 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 493:
-#line 3273 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 495:
+#line 3243 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
TType* structure = new TType((yyvsp[-1].interm.typeList), TString(""));
(yyval.interm.type).init((yyvsp[-4].lex).loc);
@@ -9397,19 +9352,19 @@ yyreduce:
(yyval.interm.type).userDef = structure;
--parseContext.structNestingLevel;
}
-#line 9401 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9322 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 494:
-#line 3283 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 496:
+#line 3253 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeList) = (yyvsp[0].interm.typeList);
}
-#line 9409 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9330 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 495:
-#line 3286 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 497:
+#line 3256 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeList) = (yyvsp[-1].interm.typeList);
for (unsigned int i = 0; i < (yyvsp[0].interm.typeList)->size(); ++i) {
@@ -9420,16 +9375,16 @@ yyreduce:
(yyval.interm.typeList)->push_back((*(yyvsp[0].interm.typeList))[i]);
}
}
-#line 9424 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9345 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 496:
-#line 3299 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 498:
+#line 3269 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-2].interm.type).arraySizes) {
parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
parseContext.profileRequires((yyvsp[-2].interm.type).loc, EEsProfile, 300, 0, "arrayed type");
- if (parseContext.profile == EEsProfile)
+ if (parseContext.isEsProfile())
parseContext.arraySizeRequiredCheck((yyvsp[-2].interm.type).loc, *(yyvsp[-2].interm.type).arraySizes);
}
@@ -9447,16 +9402,16 @@ yyreduce:
(*(yyval.interm.typeList))[i].type->shallowCopy(type);
}
}
-#line 9451 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9372 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 497:
-#line 3321 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 499:
+#line 3291 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-2].interm.type).arraySizes) {
parseContext.profileRequires((yyvsp[-2].interm.type).loc, ENoProfile, 120, E_GL_3DL_array_objects, "arrayed type");
parseContext.profileRequires((yyvsp[-2].interm.type).loc, EEsProfile, 300, 0, "arrayed type");
- if (parseContext.profile == EEsProfile)
+ if (parseContext.isEsProfile())
parseContext.arraySizeRequiredCheck((yyvsp[-2].interm.type).loc, *(yyvsp[-2].interm.type).arraySizes);
}
@@ -9476,38 +9431,38 @@ yyreduce:
(*(yyval.interm.typeList))[i].type->shallowCopy(type);
}
}
-#line 9480 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9401 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 498:
-#line 3348 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 500:
+#line 3318 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeList) = new TTypeList;
(yyval.interm.typeList)->push_back((yyvsp[0].interm.typeLine));
}
-#line 9489 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9410 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 499:
-#line 3352 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 501:
+#line 3322 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeList)->push_back((yyvsp[0].interm.typeLine));
}
-#line 9497 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9418 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 500:
-#line 3358 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 502:
+#line 3328 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.typeLine).type = new TType(EbtVoid);
(yyval.interm.typeLine).loc = (yyvsp[0].lex).loc;
(yyval.interm.typeLine).type->setFieldName(*(yyvsp[0].lex).string);
}
-#line 9507 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9428 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 501:
-#line 3363 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 503:
+#line 3333 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.arrayOfArrayVersionCheck((yyvsp[-1].lex).loc, (yyvsp[0].interm).arraySizes);
@@ -9516,219 +9471,235 @@ yyreduce:
(yyval.interm.typeLine).type->setFieldName(*(yyvsp[-1].lex).string);
(yyval.interm.typeLine).type->transferArraySizes((yyvsp[0].interm).arraySizes);
}
-#line 9520 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9441 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 502:
-#line 3374 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 504:
+#line 3344 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 9528 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9449 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 503:
-#line 3377 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 505:
+#line 3348 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
const char* initFeature = "{ } style initializers";
parseContext.requireProfile((yyvsp[-2].lex).loc, ~EEsProfile, initFeature);
parseContext.profileRequires((yyvsp[-2].lex).loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature);
(yyval.interm.intermTypedNode) = (yyvsp[-1].interm.intermTypedNode);
}
-#line 9539 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9460 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 504:
-#line 3383 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 506:
+#line 3354 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
const char* initFeature = "{ } style initializers";
parseContext.requireProfile((yyvsp[-3].lex).loc, ~EEsProfile, initFeature);
parseContext.profileRequires((yyvsp[-3].lex).loc, ~EEsProfile, 420, E_GL_ARB_shading_language_420pack, initFeature);
(yyval.interm.intermTypedNode) = (yyvsp[-2].interm.intermTypedNode);
}
-#line 9550 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9471 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 505:
-#line 3392 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 507:
+#line 3365 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate(0, (yyvsp[0].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode)->getLoc());
}
-#line 9558 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9479 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 506:
-#line 3395 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 508:
+#line 3368 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = parseContext.intermediate.growAggregate((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.intermTypedNode));
}
-#line 9566 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
- break;
-
- case 507:
-#line 3401 "MachineIndependent/glslang.y" /* yacc.c:1646 */
- { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9572 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
- break;
-
- case 508:
-#line 3405 "MachineIndependent/glslang.y" /* yacc.c:1646 */
- { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9578 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9487 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 509:
-#line 3406 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3375 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9584 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9493 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 510:
-#line 3412 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3379 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9590 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9499 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 511:
-#line 3413 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3380 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9596 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9505 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 512:
-#line 3414 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3386 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9602 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9511 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 513:
-#line 3415 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3387 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9608 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9517 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 514:
-#line 3416 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3388 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9614 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9523 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 515:
-#line 3417 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3389 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9620 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9529 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 516:
-#line 3418 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3390 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9626 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9535 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 517:
-#line 3422 "MachineIndependent/glslang.y" /* yacc.c:1646 */
- { (yyval.interm.intermNode) = 0; }
-#line 9632 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 3391 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
+#line 9541 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
case 518:
-#line 3423 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+#line 3392 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
+#line 9547 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+ break;
+
+ case 519:
+#line 3394 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ { (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
+#line 9553 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+ break;
+
+ case 520:
+#line 3400 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ {
+ parseContext.requireStage((yyvsp[-1].lex).loc, EShLangFragment, "demote");
+ parseContext.requireExtensions((yyvsp[-1].lex).loc, 1, &E_GL_EXT_demote_to_helper_invocation, "demote");
+ (yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpDemote, (yyvsp[-1].lex).loc);
+ }
+#line 9563 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+ break;
+
+ case 521:
+#line 3409 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ { (yyval.interm.intermNode) = 0; }
+#line 9569 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+ break;
+
+ case 522:
+#line 3410 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.symbolTable.push();
++parseContext.statementNestingLevel;
}
-#line 9641 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9578 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 519:
-#line 3427 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 523:
+#line 3414 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]);
--parseContext.statementNestingLevel;
}
-#line 9650 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9587 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 520:
-#line 3431 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 524:
+#line 3418 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-2].interm.intermNode) && (yyvsp[-2].interm.intermNode)->getAsAggregate())
(yyvsp[-2].interm.intermNode)->getAsAggregate()->setOperator(EOpSequence);
(yyval.interm.intermNode) = (yyvsp[-2].interm.intermNode);
}
-#line 9660 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9597 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 521:
-#line 3439 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 525:
+#line 3426 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9666 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9603 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 522:
-#line 3440 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 526:
+#line 3427 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = (yyvsp[0].interm.intermNode); }
-#line 9672 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9609 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 523:
-#line 3444 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 527:
+#line 3431 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
++parseContext.controlFlowNestingLevel;
}
-#line 9680 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9617 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 524:
-#line 3447 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 528:
+#line 3434 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
--parseContext.controlFlowNestingLevel;
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9689 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9626 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 525:
-#line 3451 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 529:
+#line 3438 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.symbolTable.push();
++parseContext.statementNestingLevel;
++parseContext.controlFlowNestingLevel;
}
-#line 9699 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9636 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 526:
-#line 3456 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 530:
+#line 3443 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]);
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9710 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9647 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 527:
-#line 3465 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 531:
+#line 3452 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = 0;
}
-#line 9718 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9655 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 528:
-#line 3468 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 532:
+#line 3455 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[-1].interm.intermNode) && (yyvsp[-1].interm.intermNode)->getAsAggregate())
(yyvsp[-1].interm.intermNode)->getAsAggregate()->setOperator(EOpSequence);
(yyval.interm.intermNode) = (yyvsp[-1].interm.intermNode);
}
-#line 9728 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9665 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 529:
-#line 3476 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 533:
+#line 3463 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[0].interm.intermNode));
if ((yyvsp[0].interm.intermNode) && (yyvsp[0].interm.intermNode)->getAsBranchNode() && ((yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpCase ||
@@ -9737,11 +9708,11 @@ yyreduce:
(yyval.interm.intermNode) = 0; // start a fresh subsequence for what's after this case
}
}
-#line 9741 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9678 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 530:
-#line 3484 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 534:
+#line 3471 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[0].interm.intermNode) && (yyvsp[0].interm.intermNode)->getAsBranchNode() && ((yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpCase ||
(yyvsp[0].interm.intermNode)->getAsBranchNode()->getFlowOp() == EOpDefault)) {
@@ -9750,76 +9721,76 @@ yyreduce:
} else
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-1].interm.intermNode), (yyvsp[0].interm.intermNode));
}
-#line 9754 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9691 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 531:
-#line 3495 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 535:
+#line 3482 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = 0; }
-#line 9760 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9697 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 532:
-#line 3496 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 536:
+#line 3483 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{ (yyval.interm.intermNode) = static_cast<TIntermNode*>((yyvsp[-1].interm.intermTypedNode)); }
-#line 9766 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9703 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 533:
-#line 3500 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 537:
+#line 3487 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9774 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9711 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 534:
-#line 3503 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 538:
+#line 3491 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.handleSelectionAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode));
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9783 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9720 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 535:
-#line 3509 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 539:
+#line 3498 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.boolCheck((yyvsp[-4].lex).loc, (yyvsp[-2].interm.intermTypedNode));
(yyval.interm.intermNode) = parseContext.intermediate.addSelection((yyvsp[-2].interm.intermTypedNode), (yyvsp[0].interm.nodePair), (yyvsp[-4].lex).loc);
}
-#line 9792 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9729 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 536:
-#line 3516 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 540:
+#line 3505 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.nodePair).node1 = (yyvsp[-2].interm.intermNode);
(yyval.interm.nodePair).node2 = (yyvsp[0].interm.intermNode);
}
-#line 9801 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9738 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 537:
-#line 3520 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 541:
+#line 3509 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.nodePair).node1 = (yyvsp[0].interm.intermNode);
(yyval.interm.nodePair).node2 = 0;
}
-#line 9810 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9747 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 538:
-#line 3528 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 542:
+#line 3517 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
parseContext.boolCheck((yyvsp[0].interm.intermTypedNode)->getLoc(), (yyvsp[0].interm.intermTypedNode));
}
-#line 9819 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9756 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 539:
-#line 3532 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 543:
+#line 3521 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.boolCheck((yyvsp[-2].lex).loc, (yyvsp[-3].interm.type));
@@ -9830,28 +9801,28 @@ yyreduce:
else
(yyval.interm.intermTypedNode) = 0;
}
-#line 9834 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9771 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 540:
-#line 3545 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 544:
+#line 3534 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9842 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9779 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 541:
-#line 3548 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 545:
+#line 3538 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.handleSwitchAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode));
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9851 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9788 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 542:
-#line 3554 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 546:
+#line 3545 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// start new switch sequence on the switch stack
++parseContext.controlFlowNestingLevel;
@@ -9860,11 +9831,11 @@ yyreduce:
parseContext.switchLevel.push_back(parseContext.statementNestingLevel);
parseContext.symbolTable.push();
}
-#line 9864 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9801 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 543:
-#line 3562 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 547:
+#line 3553 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = parseContext.addSwitch((yyvsp[-7].lex).loc, (yyvsp[-5].interm.intermTypedNode), (yyvsp[-1].interm.intermNode) ? (yyvsp[-1].interm.intermNode)->getAsAggregate() : 0);
delete parseContext.switchSequenceStack.back();
@@ -9874,27 +9845,27 @@ yyreduce:
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
}
-#line 9878 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9815 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 544:
-#line 3574 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 548:
+#line 3565 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = 0;
}
-#line 9886 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9823 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 545:
-#line 3577 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 549:
+#line 3568 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9894 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9831 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 546:
-#line 3583 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 550:
+#line 3574 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = 0;
if (parseContext.switchLevel.size() == 0)
@@ -9907,11 +9878,11 @@ yyreduce:
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpCase, (yyvsp[-1].interm.intermTypedNode), (yyvsp[-2].lex).loc);
}
}
-#line 9911 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9848 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 547:
-#line 3595 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 551:
+#line 3586 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = 0;
if (parseContext.switchLevel.size() == 0)
@@ -9921,28 +9892,28 @@ yyreduce:
else
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpDefault, (yyvsp[-1].lex).loc);
}
-#line 9925 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9862 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 548:
-#line 3607 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 552:
+#line 3598 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9933 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9870 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 549:
-#line 3610 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 553:
+#line 3602 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.handleLoopAttributes(*(yyvsp[-1].interm.attributes), (yyvsp[0].interm.intermNode));
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 9942 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9879 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 550:
-#line 3616 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 554:
+#line 3609 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if (! parseContext.limits.whileLoops)
parseContext.error((yyvsp[-1].lex).loc, "while loops not available", "limitation", "");
@@ -9951,11 +9922,11 @@ yyreduce:
++parseContext.statementNestingLevel;
++parseContext.controlFlowNestingLevel;
}
-#line 9955 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9892 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 551:
-#line 3624 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 555:
+#line 3617 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]);
(yyval.interm.intermNode) = parseContext.intermediate.addLoop((yyvsp[0].interm.intermNode), (yyvsp[-2].interm.intermTypedNode), 0, true, (yyvsp[-5].lex).loc);
@@ -9963,21 +9934,21 @@ yyreduce:
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
}
-#line 9967 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9904 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 552:
-#line 3631 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 556:
+#line 3624 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
++parseContext.loopNestingLevel;
++parseContext.statementNestingLevel;
++parseContext.controlFlowNestingLevel;
}
-#line 9977 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9914 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 553:
-#line 3636 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 557:
+#line 3629 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if (! parseContext.limits.whileLoops)
parseContext.error((yyvsp[-7].lex).loc, "do-while loops not available", "limitation", "");
@@ -9989,22 +9960,22 @@ yyreduce:
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
}
-#line 9993 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9930 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 554:
-#line 3647 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 558:
+#line 3640 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.symbolTable.push();
++parseContext.loopNestingLevel;
++parseContext.statementNestingLevel;
++parseContext.controlFlowNestingLevel;
}
-#line 10004 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9941 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 555:
-#line 3653 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 559:
+#line 3646 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.symbolTable.pop(&parseContext.defaultPrecision[0]);
(yyval.interm.intermNode) = parseContext.intermediate.makeAggregate((yyvsp[-3].interm.intermNode), (yyvsp[-5].lex).loc);
@@ -10017,81 +9988,81 @@ yyreduce:
--parseContext.statementNestingLevel;
--parseContext.controlFlowNestingLevel;
}
-#line 10021 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9958 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 556:
-#line 3668 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 560:
+#line 3661 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 10029 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9966 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 557:
-#line 3671 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 561:
+#line 3664 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 10037 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9974 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 558:
-#line 3677 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 562:
+#line 3670 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = (yyvsp[0].interm.intermTypedNode);
}
-#line 10045 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9982 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 559:
-#line 3680 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 563:
+#line 3673 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermTypedNode) = 0;
}
-#line 10053 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9990 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 560:
-#line 3686 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 564:
+#line 3679 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.nodePair).node1 = (yyvsp[-1].interm.intermTypedNode);
(yyval.interm.nodePair).node2 = 0;
}
-#line 10062 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 9999 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 561:
-#line 3690 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 565:
+#line 3683 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.nodePair).node1 = (yyvsp[-2].interm.intermTypedNode);
(yyval.interm.nodePair).node2 = (yyvsp[0].interm.intermTypedNode);
}
-#line 10071 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10008 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 562:
-#line 3697 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 566:
+#line 3690 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if (parseContext.loopNestingLevel <= 0)
parseContext.error((yyvsp[-1].lex).loc, "continue statement only allowed in loops", "", "");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpContinue, (yyvsp[-1].lex).loc);
}
-#line 10081 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10018 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 563:
-#line 3702 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 567:
+#line 3695 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if (parseContext.loopNestingLevel + parseContext.switchSequenceStack.size() <= 0)
parseContext.error((yyvsp[-1].lex).loc, "break statement only allowed in switch and loops", "", "");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpBreak, (yyvsp[-1].lex).loc);
}
-#line 10091 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10028 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 564:
-#line 3707 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 568:
+#line 3700 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpReturn, (yyvsp[-1].lex).loc);
if (parseContext.currentFunctionType->getBasicType() != EbtVoid)
@@ -10099,83 +10070,83 @@ yyreduce:
if (parseContext.inMain)
parseContext.postEntryPointReturn = true;
}
-#line 10103 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10040 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 565:
-#line 3714 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 569:
+#line 3707 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = parseContext.handleReturnValue((yyvsp[-2].lex).loc, (yyvsp[-1].interm.intermTypedNode));
}
-#line 10111 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10048 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 566:
-#line 3717 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 570:
+#line 3710 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.requireStage((yyvsp[-1].lex).loc, EShLangFragment, "discard");
(yyval.interm.intermNode) = parseContext.intermediate.addBranch(EOpKill, (yyvsp[-1].lex).loc);
}
-#line 10120 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10057 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 567:
-#line 3726 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 571:
+#line 3719 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
parseContext.intermediate.setTreeRoot((yyval.interm.intermNode));
}
-#line 10129 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10066 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 568:
-#line 3730 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 572:
+#line 3723 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
if ((yyvsp[0].interm.intermNode) != nullptr) {
(yyval.interm.intermNode) = parseContext.intermediate.growAggregate((yyvsp[-1].interm.intermNode), (yyvsp[0].interm.intermNode));
parseContext.intermediate.setTreeRoot((yyval.interm.intermNode));
}
}
-#line 10140 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10077 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 569:
-#line 3739 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 573:
+#line 3732 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 10148 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10085 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 570:
-#line 3742 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 574:
+#line 3735 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.intermNode) = (yyvsp[0].interm.intermNode);
}
-#line 10156 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10093 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 571:
-#line 3745 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 575:
+#line 3739 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
parseContext.requireProfile((yyvsp[0].lex).loc, ~EEsProfile, "extraneous semicolon");
parseContext.profileRequires((yyvsp[0].lex).loc, ~EEsProfile, 460, nullptr, "extraneous semicolon");
(yyval.interm.intermNode) = nullptr;
}
-#line 10166 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10103 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 572:
-#line 3753 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 576:
+#line 3748 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyvsp[0].interm).function = parseContext.handleFunctionDeclarator((yyvsp[0].interm).loc, *(yyvsp[0].interm).function, false /* not prototype */);
(yyvsp[0].interm).intermNode = parseContext.handleFunctionDefinition((yyvsp[0].interm).loc, *(yyvsp[0].interm).function);
}
-#line 10175 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10112 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 573:
-#line 3757 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 577:
+#line 3752 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
// May be best done as post process phase on intermediate code
if (parseContext.currentFunctionType->getBasicType() != EbtVoid && ! parseContext.functionReturnsValue)
@@ -10191,52 +10162,52 @@ yyreduce:
(yyval.interm.intermNode)->getAsAggregate()->setDebug(parseContext.contextPragma.debug);
(yyval.interm.intermNode)->getAsAggregate()->setPragmaTable(parseContext.contextPragma.pragmaTable);
}
-#line 10195 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10132 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 574:
-#line 3775 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 578:
+#line 3771 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.attributes) = (yyvsp[-2].interm.attributes);
parseContext.requireExtensions((yyvsp[-4].lex).loc, 1, &E_GL_EXT_control_flow_attributes, "attribute");
}
-#line 10204 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10141 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 575:
-#line 3781 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 579:
+#line 3777 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.attributes) = (yyvsp[0].interm.attributes);
}
-#line 10212 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10149 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 576:
-#line 3784 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 580:
+#line 3780 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.attributes) = parseContext.mergeAttributes((yyvsp[-2].interm.attributes), (yyvsp[0].interm.attributes));
}
-#line 10220 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10157 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 577:
-#line 3789 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 581:
+#line 3785 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.attributes) = parseContext.makeAttributes(*(yyvsp[0].lex).string);
}
-#line 10228 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10165 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
- case 578:
-#line 3792 "MachineIndependent/glslang.y" /* yacc.c:1646 */
+ case 582:
+#line 3788 "MachineIndependent/glslang.y" /* yacc.c:1646 */
{
(yyval.interm.attributes) = parseContext.makeAttributes(*(yyvsp[-3].lex).string, (yyvsp[-1].interm.intermTypedNode));
}
-#line 10236 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10173 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
break;
-#line 10240 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
+#line 10177 "MachineIndependent/glslang_tab.cpp" /* yacc.c:1646 */
default: break;
}
/* User semantic actions sometimes alter yychar, and that requires
@@ -10464,5 +10435,5 @@ yyreturn:
#endif
return yyresult;
}
-#line 3796 "MachineIndependent/glslang.y" /* yacc.c:1906 */
+#line 3793 "MachineIndependent/glslang.y" /* yacc.c:1906 */
diff --git a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h
index a467db644b..f4f4114730 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/glslang_tab.cpp.h
@@ -45,271 +45,271 @@ extern int yydebug;
# define YYTOKENTYPE
enum yytokentype
{
- ATTRIBUTE = 258,
- VARYING = 259,
- FLOAT16_T = 260,
- FLOAT = 261,
- FLOAT32_T = 262,
- DOUBLE = 263,
- FLOAT64_T = 264,
- CONST = 265,
- BOOL = 266,
- INT = 267,
- UINT = 268,
- INT64_T = 269,
- UINT64_T = 270,
- INT32_T = 271,
- UINT32_T = 272,
- INT16_T = 273,
- UINT16_T = 274,
- INT8_T = 275,
- UINT8_T = 276,
- BREAK = 277,
- CONTINUE = 278,
- DO = 279,
- ELSE = 280,
- FOR = 281,
- IF = 282,
- DISCARD = 283,
- RETURN = 284,
- SWITCH = 285,
- CASE = 286,
- DEFAULT = 287,
- SUBROUTINE = 288,
- BVEC2 = 289,
- BVEC3 = 290,
- BVEC4 = 291,
- IVEC2 = 292,
- IVEC3 = 293,
- IVEC4 = 294,
- UVEC2 = 295,
- UVEC3 = 296,
- UVEC4 = 297,
- I64VEC2 = 298,
- I64VEC3 = 299,
- I64VEC4 = 300,
- U64VEC2 = 301,
- U64VEC3 = 302,
- U64VEC4 = 303,
- I32VEC2 = 304,
- I32VEC3 = 305,
- I32VEC4 = 306,
- U32VEC2 = 307,
- U32VEC3 = 308,
- U32VEC4 = 309,
- I16VEC2 = 310,
- I16VEC3 = 311,
- I16VEC4 = 312,
- U16VEC2 = 313,
- U16VEC3 = 314,
- U16VEC4 = 315,
- I8VEC2 = 316,
- I8VEC3 = 317,
- I8VEC4 = 318,
- U8VEC2 = 319,
- U8VEC3 = 320,
- U8VEC4 = 321,
- VEC2 = 322,
- VEC3 = 323,
- VEC4 = 324,
- MAT2 = 325,
- MAT3 = 326,
- MAT4 = 327,
- CENTROID = 328,
- IN = 329,
- OUT = 330,
- INOUT = 331,
- UNIFORM = 332,
- PATCH = 333,
- SAMPLE = 334,
- BUFFER = 335,
- SHARED = 336,
- NONUNIFORM = 337,
- PAYLOADNV = 338,
- PAYLOADINNV = 339,
- HITATTRNV = 340,
- CALLDATANV = 341,
- CALLDATAINNV = 342,
- COHERENT = 343,
- VOLATILE = 344,
- RESTRICT = 345,
- READONLY = 346,
- WRITEONLY = 347,
- DEVICECOHERENT = 348,
- QUEUEFAMILYCOHERENT = 349,
- WORKGROUPCOHERENT = 350,
- SUBGROUPCOHERENT = 351,
- NONPRIVATE = 352,
- DVEC2 = 353,
- DVEC3 = 354,
- DVEC4 = 355,
- DMAT2 = 356,
- DMAT3 = 357,
- DMAT4 = 358,
- F16VEC2 = 359,
- F16VEC3 = 360,
- F16VEC4 = 361,
- F16MAT2 = 362,
- F16MAT3 = 363,
- F16MAT4 = 364,
- F32VEC2 = 365,
- F32VEC3 = 366,
- F32VEC4 = 367,
- F32MAT2 = 368,
- F32MAT3 = 369,
- F32MAT4 = 370,
- F64VEC2 = 371,
- F64VEC3 = 372,
- F64VEC4 = 373,
- F64MAT2 = 374,
- F64MAT3 = 375,
- F64MAT4 = 376,
- NOPERSPECTIVE = 377,
- FLAT = 378,
- SMOOTH = 379,
- LAYOUT = 380,
- EXPLICITINTERPAMD = 381,
- PERVERTEXNV = 382,
- PERPRIMITIVENV = 383,
- PERVIEWNV = 384,
- PERTASKNV = 385,
- MAT2X2 = 386,
- MAT2X3 = 387,
- MAT2X4 = 388,
- MAT3X2 = 389,
- MAT3X3 = 390,
- MAT3X4 = 391,
- MAT4X2 = 392,
- MAT4X3 = 393,
- MAT4X4 = 394,
- DMAT2X2 = 395,
- DMAT2X3 = 396,
- DMAT2X4 = 397,
- DMAT3X2 = 398,
- DMAT3X3 = 399,
- DMAT3X4 = 400,
- DMAT4X2 = 401,
- DMAT4X3 = 402,
- DMAT4X4 = 403,
- F16MAT2X2 = 404,
- F16MAT2X3 = 405,
- F16MAT2X4 = 406,
- F16MAT3X2 = 407,
- F16MAT3X3 = 408,
- F16MAT3X4 = 409,
- F16MAT4X2 = 410,
- F16MAT4X3 = 411,
- F16MAT4X4 = 412,
- F32MAT2X2 = 413,
- F32MAT2X3 = 414,
- F32MAT2X4 = 415,
- F32MAT3X2 = 416,
- F32MAT3X3 = 417,
- F32MAT3X4 = 418,
- F32MAT4X2 = 419,
- F32MAT4X3 = 420,
- F32MAT4X4 = 421,
- F64MAT2X2 = 422,
- F64MAT2X3 = 423,
- F64MAT2X4 = 424,
- F64MAT3X2 = 425,
- F64MAT3X3 = 426,
- F64MAT3X4 = 427,
- F64MAT4X2 = 428,
- F64MAT4X3 = 429,
- F64MAT4X4 = 430,
- ATOMIC_UINT = 431,
- ACCSTRUCTNV = 432,
- FCOOPMATNV = 433,
- SAMPLER1D = 434,
- SAMPLER2D = 435,
- SAMPLER3D = 436,
- SAMPLERCUBE = 437,
- SAMPLER1DSHADOW = 438,
- SAMPLER2DSHADOW = 439,
- SAMPLERCUBESHADOW = 440,
- SAMPLER1DARRAY = 441,
- SAMPLER2DARRAY = 442,
- SAMPLER1DARRAYSHADOW = 443,
- SAMPLER2DARRAYSHADOW = 444,
- ISAMPLER1D = 445,
- ISAMPLER2D = 446,
- ISAMPLER3D = 447,
- ISAMPLERCUBE = 448,
- ISAMPLER1DARRAY = 449,
- ISAMPLER2DARRAY = 450,
- USAMPLER1D = 451,
- USAMPLER2D = 452,
- USAMPLER3D = 453,
- USAMPLERCUBE = 454,
- USAMPLER1DARRAY = 455,
- USAMPLER2DARRAY = 456,
- SAMPLER2DRECT = 457,
- SAMPLER2DRECTSHADOW = 458,
- ISAMPLER2DRECT = 459,
- USAMPLER2DRECT = 460,
- SAMPLERBUFFER = 461,
- ISAMPLERBUFFER = 462,
- USAMPLERBUFFER = 463,
- SAMPLERCUBEARRAY = 464,
- SAMPLERCUBEARRAYSHADOW = 465,
- ISAMPLERCUBEARRAY = 466,
- USAMPLERCUBEARRAY = 467,
- SAMPLER2DMS = 468,
- ISAMPLER2DMS = 469,
- USAMPLER2DMS = 470,
- SAMPLER2DMSARRAY = 471,
- ISAMPLER2DMSARRAY = 472,
- USAMPLER2DMSARRAY = 473,
- SAMPLEREXTERNALOES = 474,
- SAMPLEREXTERNAL2DY2YEXT = 475,
- F16SAMPLER1D = 476,
- F16SAMPLER2D = 477,
- F16SAMPLER3D = 478,
- F16SAMPLER2DRECT = 479,
- F16SAMPLERCUBE = 480,
- F16SAMPLER1DARRAY = 481,
- F16SAMPLER2DARRAY = 482,
- F16SAMPLERCUBEARRAY = 483,
- F16SAMPLERBUFFER = 484,
- F16SAMPLER2DMS = 485,
- F16SAMPLER2DMSARRAY = 486,
- F16SAMPLER1DSHADOW = 487,
- F16SAMPLER2DSHADOW = 488,
- F16SAMPLER1DARRAYSHADOW = 489,
- F16SAMPLER2DARRAYSHADOW = 490,
- F16SAMPLER2DRECTSHADOW = 491,
- F16SAMPLERCUBESHADOW = 492,
- F16SAMPLERCUBEARRAYSHADOW = 493,
- SAMPLER = 494,
- SAMPLERSHADOW = 495,
- TEXTURE1D = 496,
- TEXTURE2D = 497,
- TEXTURE3D = 498,
- TEXTURECUBE = 499,
- TEXTURE1DARRAY = 500,
- TEXTURE2DARRAY = 501,
- ITEXTURE1D = 502,
- ITEXTURE2D = 503,
- ITEXTURE3D = 504,
- ITEXTURECUBE = 505,
- ITEXTURE1DARRAY = 506,
- ITEXTURE2DARRAY = 507,
- UTEXTURE1D = 508,
- UTEXTURE2D = 509,
- UTEXTURE3D = 510,
- UTEXTURECUBE = 511,
- UTEXTURE1DARRAY = 512,
- UTEXTURE2DARRAY = 513,
- TEXTURE2DRECT = 514,
- ITEXTURE2DRECT = 515,
- UTEXTURE2DRECT = 516,
- TEXTUREBUFFER = 517,
- ITEXTUREBUFFER = 518,
- UTEXTUREBUFFER = 519,
- TEXTURECUBEARRAY = 520,
- ITEXTURECUBEARRAY = 521,
- UTEXTURECUBEARRAY = 522,
+ CONST = 258,
+ BOOL = 259,
+ INT = 260,
+ UINT = 261,
+ FLOAT = 262,
+ BVEC2 = 263,
+ BVEC3 = 264,
+ BVEC4 = 265,
+ IVEC2 = 266,
+ IVEC3 = 267,
+ IVEC4 = 268,
+ UVEC2 = 269,
+ UVEC3 = 270,
+ UVEC4 = 271,
+ VEC2 = 272,
+ VEC3 = 273,
+ VEC4 = 274,
+ MAT2 = 275,
+ MAT3 = 276,
+ MAT4 = 277,
+ MAT2X2 = 278,
+ MAT2X3 = 279,
+ MAT2X4 = 280,
+ MAT3X2 = 281,
+ MAT3X3 = 282,
+ MAT3X4 = 283,
+ MAT4X2 = 284,
+ MAT4X3 = 285,
+ MAT4X4 = 286,
+ SAMPLER2D = 287,
+ SAMPLER3D = 288,
+ SAMPLERCUBE = 289,
+ SAMPLER2DSHADOW = 290,
+ SAMPLERCUBESHADOW = 291,
+ SAMPLER2DARRAY = 292,
+ SAMPLER2DARRAYSHADOW = 293,
+ ISAMPLER2D = 294,
+ ISAMPLER3D = 295,
+ ISAMPLERCUBE = 296,
+ ISAMPLER2DARRAY = 297,
+ USAMPLER2D = 298,
+ USAMPLER3D = 299,
+ USAMPLERCUBE = 300,
+ USAMPLER2DARRAY = 301,
+ SAMPLER = 302,
+ SAMPLERSHADOW = 303,
+ TEXTURE2D = 304,
+ TEXTURE3D = 305,
+ TEXTURECUBE = 306,
+ TEXTURE2DARRAY = 307,
+ ITEXTURE2D = 308,
+ ITEXTURE3D = 309,
+ ITEXTURECUBE = 310,
+ ITEXTURE2DARRAY = 311,
+ UTEXTURE2D = 312,
+ UTEXTURE3D = 313,
+ UTEXTURECUBE = 314,
+ UTEXTURE2DARRAY = 315,
+ ATTRIBUTE = 316,
+ VARYING = 317,
+ FLOAT16_T = 318,
+ FLOAT32_T = 319,
+ DOUBLE = 320,
+ FLOAT64_T = 321,
+ INT64_T = 322,
+ UINT64_T = 323,
+ INT32_T = 324,
+ UINT32_T = 325,
+ INT16_T = 326,
+ UINT16_T = 327,
+ INT8_T = 328,
+ UINT8_T = 329,
+ I64VEC2 = 330,
+ I64VEC3 = 331,
+ I64VEC4 = 332,
+ U64VEC2 = 333,
+ U64VEC3 = 334,
+ U64VEC4 = 335,
+ I32VEC2 = 336,
+ I32VEC3 = 337,
+ I32VEC4 = 338,
+ U32VEC2 = 339,
+ U32VEC3 = 340,
+ U32VEC4 = 341,
+ I16VEC2 = 342,
+ I16VEC3 = 343,
+ I16VEC4 = 344,
+ U16VEC2 = 345,
+ U16VEC3 = 346,
+ U16VEC4 = 347,
+ I8VEC2 = 348,
+ I8VEC3 = 349,
+ I8VEC4 = 350,
+ U8VEC2 = 351,
+ U8VEC3 = 352,
+ U8VEC4 = 353,
+ DVEC2 = 354,
+ DVEC3 = 355,
+ DVEC4 = 356,
+ DMAT2 = 357,
+ DMAT3 = 358,
+ DMAT4 = 359,
+ F16VEC2 = 360,
+ F16VEC3 = 361,
+ F16VEC4 = 362,
+ F16MAT2 = 363,
+ F16MAT3 = 364,
+ F16MAT4 = 365,
+ F32VEC2 = 366,
+ F32VEC3 = 367,
+ F32VEC4 = 368,
+ F32MAT2 = 369,
+ F32MAT3 = 370,
+ F32MAT4 = 371,
+ F64VEC2 = 372,
+ F64VEC3 = 373,
+ F64VEC4 = 374,
+ F64MAT2 = 375,
+ F64MAT3 = 376,
+ F64MAT4 = 377,
+ DMAT2X2 = 378,
+ DMAT2X3 = 379,
+ DMAT2X4 = 380,
+ DMAT3X2 = 381,
+ DMAT3X3 = 382,
+ DMAT3X4 = 383,
+ DMAT4X2 = 384,
+ DMAT4X3 = 385,
+ DMAT4X4 = 386,
+ F16MAT2X2 = 387,
+ F16MAT2X3 = 388,
+ F16MAT2X4 = 389,
+ F16MAT3X2 = 390,
+ F16MAT3X3 = 391,
+ F16MAT3X4 = 392,
+ F16MAT4X2 = 393,
+ F16MAT4X3 = 394,
+ F16MAT4X4 = 395,
+ F32MAT2X2 = 396,
+ F32MAT2X3 = 397,
+ F32MAT2X4 = 398,
+ F32MAT3X2 = 399,
+ F32MAT3X3 = 400,
+ F32MAT3X4 = 401,
+ F32MAT4X2 = 402,
+ F32MAT4X3 = 403,
+ F32MAT4X4 = 404,
+ F64MAT2X2 = 405,
+ F64MAT2X3 = 406,
+ F64MAT2X4 = 407,
+ F64MAT3X2 = 408,
+ F64MAT3X3 = 409,
+ F64MAT3X4 = 410,
+ F64MAT4X2 = 411,
+ F64MAT4X3 = 412,
+ F64MAT4X4 = 413,
+ ATOMIC_UINT = 414,
+ ACCSTRUCTNV = 415,
+ FCOOPMATNV = 416,
+ ICOOPMATNV = 417,
+ UCOOPMATNV = 418,
+ SAMPLERCUBEARRAY = 419,
+ SAMPLERCUBEARRAYSHADOW = 420,
+ ISAMPLERCUBEARRAY = 421,
+ USAMPLERCUBEARRAY = 422,
+ SAMPLER1D = 423,
+ SAMPLER1DARRAY = 424,
+ SAMPLER1DARRAYSHADOW = 425,
+ ISAMPLER1D = 426,
+ SAMPLER1DSHADOW = 427,
+ SAMPLER2DRECT = 428,
+ SAMPLER2DRECTSHADOW = 429,
+ ISAMPLER2DRECT = 430,
+ USAMPLER2DRECT = 431,
+ SAMPLERBUFFER = 432,
+ ISAMPLERBUFFER = 433,
+ USAMPLERBUFFER = 434,
+ SAMPLER2DMS = 435,
+ ISAMPLER2DMS = 436,
+ USAMPLER2DMS = 437,
+ SAMPLER2DMSARRAY = 438,
+ ISAMPLER2DMSARRAY = 439,
+ USAMPLER2DMSARRAY = 440,
+ SAMPLEREXTERNALOES = 441,
+ SAMPLEREXTERNAL2DY2YEXT = 442,
+ ISAMPLER1DARRAY = 443,
+ USAMPLER1D = 444,
+ USAMPLER1DARRAY = 445,
+ F16SAMPLER1D = 446,
+ F16SAMPLER2D = 447,
+ F16SAMPLER3D = 448,
+ F16SAMPLER2DRECT = 449,
+ F16SAMPLERCUBE = 450,
+ F16SAMPLER1DARRAY = 451,
+ F16SAMPLER2DARRAY = 452,
+ F16SAMPLERCUBEARRAY = 453,
+ F16SAMPLERBUFFER = 454,
+ F16SAMPLER2DMS = 455,
+ F16SAMPLER2DMSARRAY = 456,
+ F16SAMPLER1DSHADOW = 457,
+ F16SAMPLER2DSHADOW = 458,
+ F16SAMPLER1DARRAYSHADOW = 459,
+ F16SAMPLER2DARRAYSHADOW = 460,
+ F16SAMPLER2DRECTSHADOW = 461,
+ F16SAMPLERCUBESHADOW = 462,
+ F16SAMPLERCUBEARRAYSHADOW = 463,
+ IMAGE1D = 464,
+ IIMAGE1D = 465,
+ UIMAGE1D = 466,
+ IMAGE2D = 467,
+ IIMAGE2D = 468,
+ UIMAGE2D = 469,
+ IMAGE3D = 470,
+ IIMAGE3D = 471,
+ UIMAGE3D = 472,
+ IMAGE2DRECT = 473,
+ IIMAGE2DRECT = 474,
+ UIMAGE2DRECT = 475,
+ IMAGECUBE = 476,
+ IIMAGECUBE = 477,
+ UIMAGECUBE = 478,
+ IMAGEBUFFER = 479,
+ IIMAGEBUFFER = 480,
+ UIMAGEBUFFER = 481,
+ IMAGE1DARRAY = 482,
+ IIMAGE1DARRAY = 483,
+ UIMAGE1DARRAY = 484,
+ IMAGE2DARRAY = 485,
+ IIMAGE2DARRAY = 486,
+ UIMAGE2DARRAY = 487,
+ IMAGECUBEARRAY = 488,
+ IIMAGECUBEARRAY = 489,
+ UIMAGECUBEARRAY = 490,
+ IMAGE2DMS = 491,
+ IIMAGE2DMS = 492,
+ UIMAGE2DMS = 493,
+ IMAGE2DMSARRAY = 494,
+ IIMAGE2DMSARRAY = 495,
+ UIMAGE2DMSARRAY = 496,
+ F16IMAGE1D = 497,
+ F16IMAGE2D = 498,
+ F16IMAGE3D = 499,
+ F16IMAGE2DRECT = 500,
+ F16IMAGECUBE = 501,
+ F16IMAGE1DARRAY = 502,
+ F16IMAGE2DARRAY = 503,
+ F16IMAGECUBEARRAY = 504,
+ F16IMAGEBUFFER = 505,
+ F16IMAGE2DMS = 506,
+ F16IMAGE2DMSARRAY = 507,
+ TEXTURECUBEARRAY = 508,
+ ITEXTURECUBEARRAY = 509,
+ UTEXTURECUBEARRAY = 510,
+ TEXTURE1D = 511,
+ ITEXTURE1D = 512,
+ UTEXTURE1D = 513,
+ TEXTURE1DARRAY = 514,
+ ITEXTURE1DARRAY = 515,
+ UTEXTURE1DARRAY = 516,
+ TEXTURE2DRECT = 517,
+ ITEXTURE2DRECT = 518,
+ UTEXTURE2DRECT = 519,
+ TEXTUREBUFFER = 520,
+ ITEXTUREBUFFER = 521,
+ UTEXTUREBUFFER = 522,
TEXTURE2DMS = 523,
ITEXTURE2DMS = 524,
UTEXTURE2DMS = 525,
@@ -335,121 +335,124 @@ extern int yydebug;
USUBPASSINPUTMS = 545,
F16SUBPASSINPUT = 546,
F16SUBPASSINPUTMS = 547,
- IMAGE1D = 548,
- IIMAGE1D = 549,
- UIMAGE1D = 550,
- IMAGE2D = 551,
- IIMAGE2D = 552,
- UIMAGE2D = 553,
- IMAGE3D = 554,
- IIMAGE3D = 555,
- UIMAGE3D = 556,
- IMAGE2DRECT = 557,
- IIMAGE2DRECT = 558,
- UIMAGE2DRECT = 559,
- IMAGECUBE = 560,
- IIMAGECUBE = 561,
- UIMAGECUBE = 562,
- IMAGEBUFFER = 563,
- IIMAGEBUFFER = 564,
- UIMAGEBUFFER = 565,
- IMAGE1DARRAY = 566,
- IIMAGE1DARRAY = 567,
- UIMAGE1DARRAY = 568,
- IMAGE2DARRAY = 569,
- IIMAGE2DARRAY = 570,
- UIMAGE2DARRAY = 571,
- IMAGECUBEARRAY = 572,
- IIMAGECUBEARRAY = 573,
- UIMAGECUBEARRAY = 574,
- IMAGE2DMS = 575,
- IIMAGE2DMS = 576,
- UIMAGE2DMS = 577,
- IMAGE2DMSARRAY = 578,
- IIMAGE2DMSARRAY = 579,
- UIMAGE2DMSARRAY = 580,
- F16IMAGE1D = 581,
- F16IMAGE2D = 582,
- F16IMAGE3D = 583,
- F16IMAGE2DRECT = 584,
- F16IMAGECUBE = 585,
- F16IMAGE1DARRAY = 586,
- F16IMAGE2DARRAY = 587,
- F16IMAGECUBEARRAY = 588,
- F16IMAGEBUFFER = 589,
- F16IMAGE2DMS = 590,
- F16IMAGE2DMSARRAY = 591,
- STRUCT = 592,
- VOID = 593,
- WHILE = 594,
- IDENTIFIER = 595,
- TYPE_NAME = 596,
- FLOATCONSTANT = 597,
- DOUBLECONSTANT = 598,
- INT16CONSTANT = 599,
- UINT16CONSTANT = 600,
- INT32CONSTANT = 601,
- UINT32CONSTANT = 602,
- INTCONSTANT = 603,
- UINTCONSTANT = 604,
- INT64CONSTANT = 605,
- UINT64CONSTANT = 606,
- BOOLCONSTANT = 607,
- FLOAT16CONSTANT = 608,
- LEFT_OP = 609,
- RIGHT_OP = 610,
- INC_OP = 611,
- DEC_OP = 612,
- LE_OP = 613,
- GE_OP = 614,
- EQ_OP = 615,
- NE_OP = 616,
- AND_OP = 617,
- OR_OP = 618,
- XOR_OP = 619,
- MUL_ASSIGN = 620,
- DIV_ASSIGN = 621,
- ADD_ASSIGN = 622,
- MOD_ASSIGN = 623,
- LEFT_ASSIGN = 624,
- RIGHT_ASSIGN = 625,
- AND_ASSIGN = 626,
- XOR_ASSIGN = 627,
- OR_ASSIGN = 628,
- SUB_ASSIGN = 629,
- LEFT_PAREN = 630,
- RIGHT_PAREN = 631,
- LEFT_BRACKET = 632,
- RIGHT_BRACKET = 633,
- LEFT_BRACE = 634,
- RIGHT_BRACE = 635,
- DOT = 636,
- COMMA = 637,
- COLON = 638,
- EQUAL = 639,
- SEMICOLON = 640,
- BANG = 641,
- DASH = 642,
- TILDE = 643,
- PLUS = 644,
- STAR = 645,
- SLASH = 646,
- PERCENT = 647,
- LEFT_ANGLE = 648,
- RIGHT_ANGLE = 649,
- VERTICAL_BAR = 650,
- CARET = 651,
- AMPERSAND = 652,
- QUESTION = 653,
- INVARIANT = 654,
- PRECISE = 655,
- HIGH_PRECISION = 656,
- MEDIUM_PRECISION = 657,
- LOW_PRECISION = 658,
- PRECISION = 659,
- PACKED = 660,
- RESOURCE = 661,
- SUPERP = 662
+ LEFT_OP = 548,
+ RIGHT_OP = 549,
+ INC_OP = 550,
+ DEC_OP = 551,
+ LE_OP = 552,
+ GE_OP = 553,
+ EQ_OP = 554,
+ NE_OP = 555,
+ AND_OP = 556,
+ OR_OP = 557,
+ XOR_OP = 558,
+ MUL_ASSIGN = 559,
+ DIV_ASSIGN = 560,
+ ADD_ASSIGN = 561,
+ MOD_ASSIGN = 562,
+ LEFT_ASSIGN = 563,
+ RIGHT_ASSIGN = 564,
+ AND_ASSIGN = 565,
+ XOR_ASSIGN = 566,
+ OR_ASSIGN = 567,
+ SUB_ASSIGN = 568,
+ LEFT_PAREN = 569,
+ RIGHT_PAREN = 570,
+ LEFT_BRACKET = 571,
+ RIGHT_BRACKET = 572,
+ LEFT_BRACE = 573,
+ RIGHT_BRACE = 574,
+ DOT = 575,
+ COMMA = 576,
+ COLON = 577,
+ EQUAL = 578,
+ SEMICOLON = 579,
+ BANG = 580,
+ DASH = 581,
+ TILDE = 582,
+ PLUS = 583,
+ STAR = 584,
+ SLASH = 585,
+ PERCENT = 586,
+ LEFT_ANGLE = 587,
+ RIGHT_ANGLE = 588,
+ VERTICAL_BAR = 589,
+ CARET = 590,
+ AMPERSAND = 591,
+ QUESTION = 592,
+ INVARIANT = 593,
+ HIGH_PRECISION = 594,
+ MEDIUM_PRECISION = 595,
+ LOW_PRECISION = 596,
+ PRECISION = 597,
+ PACKED = 598,
+ RESOURCE = 599,
+ SUPERP = 600,
+ FLOATCONSTANT = 601,
+ INTCONSTANT = 602,
+ UINTCONSTANT = 603,
+ BOOLCONSTANT = 604,
+ IDENTIFIER = 605,
+ TYPE_NAME = 606,
+ CENTROID = 607,
+ IN = 608,
+ OUT = 609,
+ INOUT = 610,
+ STRUCT = 611,
+ VOID = 612,
+ WHILE = 613,
+ BREAK = 614,
+ CONTINUE = 615,
+ DO = 616,
+ ELSE = 617,
+ FOR = 618,
+ IF = 619,
+ DISCARD = 620,
+ RETURN = 621,
+ SWITCH = 622,
+ CASE = 623,
+ DEFAULT = 624,
+ UNIFORM = 625,
+ SHARED = 626,
+ BUFFER = 627,
+ FLAT = 628,
+ SMOOTH = 629,
+ LAYOUT = 630,
+ DOUBLECONSTANT = 631,
+ INT16CONSTANT = 632,
+ UINT16CONSTANT = 633,
+ FLOAT16CONSTANT = 634,
+ INT32CONSTANT = 635,
+ UINT32CONSTANT = 636,
+ INT64CONSTANT = 637,
+ UINT64CONSTANT = 638,
+ SUBROUTINE = 639,
+ DEMOTE = 640,
+ PAYLOADNV = 641,
+ PAYLOADINNV = 642,
+ HITATTRNV = 643,
+ CALLDATANV = 644,
+ CALLDATAINNV = 645,
+ PATCH = 646,
+ SAMPLE = 647,
+ NONUNIFORM = 648,
+ COHERENT = 649,
+ VOLATILE = 650,
+ RESTRICT = 651,
+ READONLY = 652,
+ WRITEONLY = 653,
+ DEVICECOHERENT = 654,
+ QUEUEFAMILYCOHERENT = 655,
+ WORKGROUPCOHERENT = 656,
+ SUBGROUPCOHERENT = 657,
+ NONPRIVATE = 658,
+ NOPERSPECTIVE = 659,
+ EXPLICITINTERPAMD = 660,
+ PERVERTEXNV = 661,
+ PERPRIMITIVENV = 662,
+ PERVIEWNV = 663,
+ PERTASKNV = 664,
+ PRECISE = 665
};
#endif
@@ -458,7 +461,7 @@ extern int yydebug;
union YYSTYPE
{
-#line 71 "MachineIndependent/glslang.y" /* yacc.c:1909 */
+#line 96 "MachineIndependent/glslang.y" /* yacc.c:1909 */
struct {
glslang::TSourceLoc loc;
@@ -494,7 +497,7 @@ union YYSTYPE
glslang::TArraySizes* typeParameters;
} interm;
-#line 498 "MachineIndependent/glslang_tab.cpp.h" /* yacc.c:1909 */
+#line 501 "MachineIndependent/glslang_tab.cpp.h" /* yacc.c:1909 */
};
typedef union YYSTYPE YYSTYPE;
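The wholesale token renumbering above is a consequence of how bison assigns token codes: values start at 258 (0-255 are reserved for single-character tokens, 256 and 257 for bookkeeping) and follow %token declaration order, so reordering the declarations in glslang.y renumbers every token. Client code must therefore go through the enum names, never the raw values. A hypothetical sketch (not part of the patch) of a lexer helper that stays correct across regenerations:

    #include <cstring>
    #include "glslang_tab.cpp.h" // the regenerated parser header

    // Hypothetical helper: map a scanned keyword to its bison token code.
    // The enum names survive regeneration; the numeric values do not
    // (CONST moved from 265 to 258 in this change, UNIFORM from 332 to 625).
    static int tokenForKeyword(const char* kw) {
        if (strcmp(kw, "const") == 0)
            return CONST;
        if (strcmp(kw, "uniform") == 0)
            return UNIFORM;
        return IDENTIFIER; // fallback for non-keywords
    }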
diff --git a/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp b/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp
index 5e2eed16ed..3a93aedafb 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/intermOut.cpp
@@ -35,6 +35,8 @@
// POSSIBILITY OF SUCH DAMAGE.
//
+#ifndef GLSLANG_WEB
+
#include "localintermediate.h"
#include "../Include/InfoSink.h"
@@ -174,7 +176,7 @@ bool TOutputTraverser::visitBinary(TVisit /* visit */, TIntermBinary* node)
case EOpIndexIndirect: out.debug << "indirect index"; break;
case EOpIndexDirectStruct:
{
- bool reference = node->getLeft()->getType().getBasicType() == EbtReference;
+ bool reference = node->getLeft()->getType().isReference();
const TTypeList *members = reference ? node->getLeft()->getType().getReferentType()->getStruct() : node->getLeft()->getType().getStruct();
out.debug << (*members)[node->getRight()->getAsConstantUnion()->getConstArray()[0].getIConst()].type->getFieldName();
out.debug << ": direct index for structure"; break;
@@ -211,6 +213,13 @@ bool TOutputTraverser::visitBinary(TVisit /* visit */, TIntermBinary* node)
case EOpLogicalXor: out.debug << "logical-xor"; break;
case EOpLogicalAnd: out.debug << "logical-and"; break;
+ case EOpAbsDifference: out.debug << "absoluteDifference"; break;
+ case EOpAddSaturate: out.debug << "addSaturate"; break;
+ case EOpSubSaturate: out.debug << "subtractSaturate"; break;
+ case EOpAverage: out.debug << "average"; break;
+ case EOpAverageRounded: out.debug << "averageRounded"; break;
+ case EOpMul32x16: out.debug << "multiply32x16"; break;
+
default: out.debug << "<unknown op>";
}
@@ -555,6 +564,9 @@ bool TOutputTraverser::visitUnary(TVisit /* visit */, TIntermUnary* node)
case EOpFindLSB: out.debug << "findLSB"; break;
case EOpFindMSB: out.debug << "findMSB"; break;
+ case EOpCountLeadingZeros: out.debug << "countLeadingZeros"; break;
+ case EOpCountTrailingZeros: out.debug << "countTrailingZeros"; break;
+
case EOpNoise: out.debug << "noise"; break;
case EOpBallot: out.debug << "ballot"; break;
@@ -615,7 +627,6 @@ bool TOutputTraverser::visitUnary(TVisit /* visit */, TIntermUnary* node)
case EOpSubgroupQuadSwapVertical: out.debug << "subgroupQuadSwapVertical"; break;
case EOpSubgroupQuadSwapDiagonal: out.debug << "subgroupQuadSwapDiagonal"; break;
-#ifdef NV_EXTENSIONS
case EOpSubgroupPartition: out.debug << "subgroupPartitionNV"; break;
case EOpSubgroupPartitionedAdd: out.debug << "subgroupPartitionedAddNV"; break;
case EOpSubgroupPartitionedMul: out.debug << "subgroupPartitionedMulNV"; break;
@@ -638,7 +649,6 @@ bool TOutputTraverser::visitUnary(TVisit /* visit */, TIntermUnary* node)
case EOpSubgroupPartitionedExclusiveAnd: out.debug << "subgroupPartitionedExclusiveAndNV"; break;
case EOpSubgroupPartitionedExclusiveOr: out.debug << "subgroupPartitionedExclusiveOrNV"; break;
case EOpSubgroupPartitionedExclusiveXor: out.debug << "subgroupPartitionedExclusiveXorNV"; break;
-#endif
case EOpClip: out.debug << "clip"; break;
case EOpIsFinite: out.debug << "isfinite"; break;
@@ -648,7 +658,6 @@ bool TOutputTraverser::visitUnary(TVisit /* visit */, TIntermUnary* node)
case EOpSparseTexelsResident: out.debug << "sparseTexelsResident"; break;
-#ifdef AMD_EXTENSIONS
case EOpMinInvocations: out.debug << "minInvocations"; break;
case EOpMaxInvocations: out.debug << "maxInvocations"; break;
case EOpAddInvocations: out.debug << "addInvocations"; break;
@@ -677,7 +686,6 @@ bool TOutputTraverser::visitUnary(TVisit /* visit */, TIntermUnary* node)
case EOpCubeFaceIndex: out.debug << "cubeFaceIndex"; break;
case EOpCubeFaceCoord: out.debug << "cubeFaceCoord"; break;
-#endif
case EOpSubpassLoad: out.debug << "subpassLoad"; break;
case EOpSubpassLoadMS: out.debug << "subpassLoadMS"; break;
@@ -863,7 +871,6 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
case EOpReadInvocation: out.debug << "readInvocation"; break;
-#ifdef AMD_EXTENSIONS
case EOpSwizzleInvocations: out.debug << "swizzleInvocations"; break;
case EOpSwizzleInvocationsMasked: out.debug << "swizzleInvocationsMasked"; break;
case EOpWriteInvocation: out.debug << "writeInvocation"; break;
@@ -871,9 +878,7 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
case EOpMin3: out.debug << "min3"; break;
case EOpMax3: out.debug << "max3"; break;
case EOpMid3: out.debug << "mid3"; break;
-
case EOpTime: out.debug << "time"; break;
-#endif
case EOpAtomicAdd: out.debug << "AtomicAdd"; break;
case EOpAtomicMin: out.debug << "AtomicMin"; break;
@@ -910,10 +915,8 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
case EOpImageAtomicCompSwap: out.debug << "imageAtomicCompSwap"; break;
case EOpImageAtomicLoad: out.debug << "imageAtomicLoad"; break;
case EOpImageAtomicStore: out.debug << "imageAtomicStore"; break;
-#ifdef AMD_EXTENSIONS
case EOpImageLoadLod: out.debug << "imageLoadLod"; break;
case EOpImageStoreLod: out.debug << "imageStoreLod"; break;
-#endif
case EOpTextureQuerySize: out.debug << "textureSize"; break;
case EOpTextureQueryLod: out.debug << "textureQueryLod"; break;
@@ -940,11 +943,9 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
case EOpTextureOffsetClamp: out.debug << "textureOffsetClamp"; break;
case EOpTextureGradClamp: out.debug << "textureGradClamp"; break;
case EOpTextureGradOffsetClamp: out.debug << "textureGradOffsetClamp"; break;
-#ifdef AMD_EXTENSIONS
case EOpTextureGatherLod: out.debug << "textureGatherLod"; break;
case EOpTextureGatherLodOffset: out.debug << "textureGatherLodOffset"; break;
case EOpTextureGatherLodOffsets: out.debug << "textureGatherLodOffsets"; break;
-#endif
case EOpSparseTexture: out.debug << "sparseTexture"; break;
case EOpSparseTextureOffset: out.debug << "sparseTextureOffset"; break;
@@ -962,19 +963,15 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
case EOpSparseTextureOffsetClamp: out.debug << "sparseTextureOffsetClamp"; break;
case EOpSparseTextureGradClamp: out.debug << "sparseTextureGradClamp"; break;
case EOpSparseTextureGradOffsetClamp: out.debug << "sparseTextureGradOffsetClamp"; break;
-#ifdef AMD_EXTENSIONS
case EOpSparseTextureGatherLod: out.debug << "sparseTextureGatherLod"; break;
case EOpSparseTextureGatherLodOffset: out.debug << "sparseTextureGatherLodOffset"; break;
case EOpSparseTextureGatherLodOffsets: out.debug << "sparseTextureGatherLodOffsets"; break;
case EOpSparseImageLoadLod: out.debug << "sparseImageLoadLod"; break;
-#endif
-#ifdef NV_EXTENSIONS
case EOpImageSampleFootprintNV: out.debug << "imageSampleFootprintNV"; break;
case EOpImageSampleFootprintClampNV: out.debug << "imageSampleFootprintClampNV"; break;
case EOpImageSampleFootprintLodNV: out.debug << "imageSampleFootprintLodNV"; break;
case EOpImageSampleFootprintGradNV: out.debug << "imageSampleFootprintGradNV"; break;
case EOpImageSampleFootprintGradClampNV: out.debug << "imageSampleFootprintGradClampNV"; break;
-#endif
case EOpAddCarry: out.debug << "addCarry"; break;
case EOpSubBorrow: out.debug << "subBorrow"; break;
case EOpUMulExtended: out.debug << "uMulExtended"; break;
@@ -988,9 +985,7 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
case EOpInterpolateAtSample: out.debug << "interpolateAtSample"; break;
case EOpInterpolateAtOffset: out.debug << "interpolateAtOffset"; break;
-#ifdef AMD_EXTENSIONS
case EOpInterpolateAtVertex: out.debug << "interpolateAtVertex"; break;
-#endif
case EOpSinCos: out.debug << "sincos"; break;
case EOpGenMul: out.debug << "mul"; break;
@@ -1057,22 +1052,45 @@ bool TOutputTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node
case EOpSubgroupQuadSwapVertical: out.debug << "subgroupQuadSwapVertical"; break;
case EOpSubgroupQuadSwapDiagonal: out.debug << "subgroupQuadSwapDiagonal"; break;
+ case EOpSubgroupPartition: out.debug << "subgroupPartitionNV"; break;
+ case EOpSubgroupPartitionedAdd: out.debug << "subgroupPartitionedAddNV"; break;
+ case EOpSubgroupPartitionedMul: out.debug << "subgroupPartitionedMulNV"; break;
+ case EOpSubgroupPartitionedMin: out.debug << "subgroupPartitionedMinNV"; break;
+ case EOpSubgroupPartitionedMax: out.debug << "subgroupPartitionedMaxNV"; break;
+ case EOpSubgroupPartitionedAnd: out.debug << "subgroupPartitionedAndNV"; break;
+ case EOpSubgroupPartitionedOr: out.debug << "subgroupPartitionedOrNV"; break;
+ case EOpSubgroupPartitionedXor: out.debug << "subgroupPartitionedXorNV"; break;
+ case EOpSubgroupPartitionedInclusiveAdd: out.debug << "subgroupPartitionedInclusiveAddNV"; break;
+ case EOpSubgroupPartitionedInclusiveMul: out.debug << "subgroupPartitionedInclusiveMulNV"; break;
+ case EOpSubgroupPartitionedInclusiveMin: out.debug << "subgroupPartitionedInclusiveMinNV"; break;
+ case EOpSubgroupPartitionedInclusiveMax: out.debug << "subgroupPartitionedInclusiveMaxNV"; break;
+ case EOpSubgroupPartitionedInclusiveAnd: out.debug << "subgroupPartitionedInclusiveAndNV"; break;
+ case EOpSubgroupPartitionedInclusiveOr: out.debug << "subgroupPartitionedInclusiveOrNV"; break;
+ case EOpSubgroupPartitionedInclusiveXor: out.debug << "subgroupPartitionedInclusiveXorNV"; break;
+ case EOpSubgroupPartitionedExclusiveAdd: out.debug << "subgroupPartitionedExclusiveAddNV"; break;
+ case EOpSubgroupPartitionedExclusiveMul: out.debug << "subgroupPartitionedExclusiveMulNV"; break;
+ case EOpSubgroupPartitionedExclusiveMin: out.debug << "subgroupPartitionedExclusiveMinNV"; break;
+ case EOpSubgroupPartitionedExclusiveMax: out.debug << "subgroupPartitionedExclusiveMaxNV"; break;
+ case EOpSubgroupPartitionedExclusiveAnd: out.debug << "subgroupPartitionedExclusiveAndNV"; break;
+ case EOpSubgroupPartitionedExclusiveOr: out.debug << "subgroupPartitionedExclusiveOrNV"; break;
+ case EOpSubgroupPartitionedExclusiveXor: out.debug << "subgroupPartitionedExclusiveXorNV"; break;
+
case EOpSubpassLoad: out.debug << "subpassLoad"; break;
case EOpSubpassLoadMS: out.debug << "subpassLoadMS"; break;
-#ifdef NV_EXTENSIONS
case EOpTraceNV: out.debug << "traceNV"; break;
case EOpReportIntersectionNV: out.debug << "reportIntersectionNV"; break;
case EOpIgnoreIntersectionNV: out.debug << "ignoreIntersectionNV"; break;
case EOpTerminateRayNV: out.debug << "terminateRayNV"; break;
case EOpExecuteCallableNV: out.debug << "executeCallableNV"; break;
case EOpWritePackedPrimitiveIndices4x8NV: out.debug << "writePackedPrimitiveIndices4x8NV"; break;
-#endif
case EOpCooperativeMatrixLoad: out.debug << "Load cooperative matrix"; break;
case EOpCooperativeMatrixStore: out.debug << "Store cooperative matrix"; break;
case EOpCooperativeMatrixMulAdd: out.debug << "MulAdd cooperative matrices"; break;
+ case EOpIsHelperInvocation: out.debug << "IsHelperInvocation"; break;
+
default: out.debug.message(EPrefixError, "Bad aggregation op");
}
@@ -1367,6 +1385,7 @@ bool TOutputTraverser::visitBranch(TVisit /* visit*/, TIntermBranch* node)
case EOpContinue: out.debug << "Branch: Continue"; break;
case EOpReturn: out.debug << "Branch: Return"; break;
case EOpCase: out.debug << "case: "; break;
+ case EOpDemote: out.debug << "Demote"; break;
case EOpDefault: out.debug << "default: "; break;
default: out.debug << "Branch: Unknown Branch"; break;
}
@@ -1477,18 +1496,17 @@ void TIntermediate::output(TInfoSink& infoSink, bool tree)
}
infoSink.debug << "\n";
}
+ if (interlockOrdering != EioNone)
+ infoSink.debug << "interlock ordering = " << TQualifier::getInterlockOrderingString(interlockOrdering) << "\n";
break;
-#ifdef NV_EXTENSIONS
case EShLangMeshNV:
infoSink.debug << "max_vertices = " << vertices << "\n";
infoSink.debug << "max_primitives = " << primitives << "\n";
infoSink.debug << "output primitive = " << TQualifier::getGeometryString(outputPrimitive) << "\n";
// Fall through
-
case EShLangTaskNV:
// Fall through
-#endif
case EShLangCompute:
infoSink.debug << "local_size = (" << localSize[0] << ", " << localSize[1] << ", " << localSize[2] << ")\n";
{
@@ -1517,3 +1535,5 @@ void TIntermediate::output(TInfoSink& infoSink, bool tree)
}
} // end namespace glslang
+
+#endif // not GLSLANG_WEB
\ No newline at end of file
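The intermOut.cpp hunks above follow a pattern that recurs throughout this update: the fine-grained #ifdef AMD_EXTENSIONS / NV_EXTENSIONS guards are removed, so the vendor-specific cases are now always compiled, and the whole translation unit is instead excluded from the slimmed web build. The resulting file shape, sketched with the body elided:

    #ifndef GLSLANG_WEB

    #include "localintermediate.h"
    #include "../Include/InfoSink.h"

    // ... debug-print traversers; AMD/NV cases now unconditional ...

    #endif // not GLSLANG_WEB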
diff --git a/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp b/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp
index 46c7558378..3262c0a203 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/iomapper.cpp
@@ -33,16 +33,13 @@
// POSSIBILITY OF SUCH DAMAGE.
//
+#ifndef GLSLANG_WEB
+
#include "../Include/Common.h"
#include "../Include/InfoSink.h"
-#include "iomapper.h"
-#include "LiveTraverser.h"
-#include "localintermediate.h"
#include "gl_types.h"
-
-#include <unordered_set>
-#include <unordered_map>
+#include "iomapper.h"
//
// Map IO bindings.
@@ -61,60 +58,9 @@
// c. implicit dead bindings are left un-bound.
//
-
namespace glslang {
-struct TVarEntryInfo
-{
- int id;
- TIntermSymbol* symbol;
- bool live;
- int newBinding;
- int newSet;
- int newLocation;
- int newComponent;
- int newIndex;
-
- struct TOrderById
- {
- inline bool operator()(const TVarEntryInfo& l, const TVarEntryInfo& r)
- {
- return l.id < r.id;
- }
- };
-
- struct TOrderByPriority
- {
- // ordering:
- // 1) has both binding and set
- // 2) has binding but no set
- // 3) has no binding but set
- // 4) has no binding and no set
- inline bool operator()(const TVarEntryInfo& l, const TVarEntryInfo& r)
- {
- const TQualifier& lq = l.symbol->getQualifier();
- const TQualifier& rq = r.symbol->getQualifier();
-
- // simple rules:
- // has binding gives 2 points
- // has set gives 1 point
- // who has the most points is more important.
- int lPoints = (lq.hasBinding() ? 2 : 0) + (lq.hasSet() ? 1 : 0);
- int rPoints = (rq.hasBinding() ? 2 : 0) + (rq.hasSet() ? 1 : 0);
-
- if (lPoints == rPoints)
- return l.id < r.id;
- return lPoints > rPoints;
- }
- };
-};
-
-
-
-typedef std::vector<TVarEntryInfo> TVarLiveMap;
-
-class TVarGatherTraverser : public TLiveTraverser
-{
+class TVarGatherTraverser : public TLiveTraverser {
public:
TVarGatherTraverser(const TIntermediate& i, bool traverseDeadCode, TVarLiveMap& inList, TVarLiveMap& outList, TVarLiveMap& uniformList)
: TLiveTraverser(i, traverseDeadCode, true, true, false)
@@ -124,7 +70,6 @@ public:
{
}
-
virtual void visitSymbol(TIntermSymbol* base)
{
TVarLiveMap* target = nullptr;
@@ -132,16 +77,17 @@ public:
target = &inputList;
else if (base->getQualifier().storage == EvqVaryingOut)
target = &outputList;
- else if (base->getQualifier().isUniformOrBuffer() && !base->getQualifier().layoutPushConstant)
+ else if (base->getQualifier().isUniformOrBuffer() && !base->getQualifier().isPushConstant())
target = &uniformList;
-
if (target) {
- TVarEntryInfo ent = { base->getId(), base, !traverseAll };
- TVarLiveMap::iterator at = std::lower_bound(target->begin(), target->end(), ent, TVarEntryInfo::TOrderById());
- if (at != target->end() && at->id == ent.id)
- at->live = at->live || !traverseAll; // update live state
+ TVarEntryInfo ent = {base->getId(), base, ! traverseAll};
+ ent.stage = intermediate.getStage();
+ TVarLiveMap::iterator at = target->find(
+ ent.symbol->getName()); // std::lower_bound(target->begin(), target->end(), ent, TVarEntryInfo::TOrderById());
+ if (at != target->end() && at->second.id == ent.id)
+ at->second.live = at->second.live || ! traverseAll; // update live state
else
- target->insert(at, ent);
+ (*target)[ent.symbol->getName()] = ent;
}
}
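The hunk above carries the central data-structure change of this file: TVarLiveMap used to be a std::vector<TVarEntryInfo> kept sorted by id and searched with std::lower_bound; after the iomapper.h rework it is keyed by symbol name. A reduced sketch of the insert-or-update that visitSymbol now performs, assuming TVarLiveMap is a std::map<TString, TVarEntryInfo> as the new find/operator[] usage implies:

    // Name-keyed lookup replaces the id-ordered binary search.
    TVarEntryInfo ent = { base->getId(), base, !traverseAll };
    auto at = target->find(base->getName());
    if (at != target->end() && at->second.id == ent.id)
        at->second.live = at->second.live || !traverseAll; // refresh liveness
    else
        (*target)[base->getName()] = ent;                  // first sighting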
@@ -162,9 +108,7 @@ public:
{
}
-
- virtual void visitSymbol(TIntermSymbol* base)
- {
+ virtual void visitSymbol(TIntermSymbol* base) {
const TVarLiveMap* source;
if (base->getQualifier().storage == EvqVaryingIn)
source = &inputList;
@@ -176,23 +120,23 @@ public:
return;
TVarEntryInfo ent = { base->getId() };
- TVarLiveMap::const_iterator at = std::lower_bound(source->begin(), source->end(), ent, TVarEntryInfo::TOrderById());
+ TVarLiveMap::const_iterator at = source->find(base->getName());
if (at == source->end())
return;
- if (at->id != ent.id)
+ if (at->second.id != ent.id)
return;
- if (at->newBinding != -1)
- base->getWritableType().getQualifier().layoutBinding = at->newBinding;
- if (at->newSet != -1)
- base->getWritableType().getQualifier().layoutSet = at->newSet;
- if (at->newLocation != -1)
- base->getWritableType().getQualifier().layoutLocation = at->newLocation;
- if (at->newComponent != -1)
- base->getWritableType().getQualifier().layoutComponent = at->newComponent;
- if (at->newIndex != -1)
- base->getWritableType().getQualifier().layoutIndex = at->newIndex;
+ if (at->second.newBinding != -1)
+ base->getWritableType().getQualifier().layoutBinding = at->second.newBinding;
+ if (at->second.newSet != -1)
+ base->getWritableType().getQualifier().layoutSet = at->second.newSet;
+ if (at->second.newLocation != -1)
+ base->getWritableType().getQualifier().layoutLocation = at->second.newLocation;
+ if (at->second.newComponent != -1)
+ base->getWritableType().getQualifier().layoutComponent = at->second.newComponent;
+ if (at->second.newIndex != -1)
+ base->getWritableType().getQualifier().layoutIndex = at->second.newIndex;
}
private:
@@ -210,10 +154,12 @@ struct TNotifyUniformAdaptor
, resolver(r)
{
}
- inline void operator()(TVarEntryInfo& ent)
+
+ inline void operator()(std::pair<const TString, TVarEntryInfo>& entKey)
{
- resolver.notifyBinding(stage, ent.symbol->getName().c_str(), ent.symbol->getType(), ent.live);
+ resolver.notifyBinding(stage, entKey.second);
}
+
private:
TNotifyUniformAdaptor& operator=(TNotifyUniformAdaptor&);
};
@@ -222,49 +168,46 @@ struct TNotifyInOutAdaptor
{
EShLanguage stage;
TIoMapResolver& resolver;
- inline TNotifyInOutAdaptor(EShLanguage s, TIoMapResolver& r)
+ inline TNotifyInOutAdaptor(EShLanguage s, TIoMapResolver& r)
: stage(s)
, resolver(r)
{
}
- inline void operator()(TVarEntryInfo& ent)
+
+ inline void operator()(std::pair<const TString, TVarEntryInfo>& entKey)
{
- resolver.notifyInOut(stage, ent.symbol->getName().c_str(), ent.symbol->getType(), ent.live);
+ resolver.notifyInOut(stage, entKey.second);
}
+
private:
TNotifyInOutAdaptor& operator=(TNotifyInOutAdaptor&);
};
-struct TResolverUniformAdaptor
-{
- TResolverUniformAdaptor(EShLanguage s, TIoMapResolver& r, TInfoSink& i, bool& e, TIntermediate& interm)
+struct TResolverUniformAdaptor {
+ TResolverUniformAdaptor(EShLanguage s, TIoMapResolver& r, TInfoSink& i, bool& e)
: stage(s)
, resolver(r)
, infoSink(i)
, error(e)
- , intermediate(interm)
{
}
- inline void operator()(TVarEntryInfo& ent)
- {
+ inline void operator()(std::pair<const TString, TVarEntryInfo>& entKey) {
+ TVarEntryInfo& ent = entKey.second;
ent.newLocation = -1;
ent.newComponent = -1;
ent.newBinding = -1;
ent.newSet = -1;
ent.newIndex = -1;
- const bool isValid = resolver.validateBinding(stage, ent.symbol->getName().c_str(), ent.symbol->getType(),
- ent.live);
+ const bool isValid = resolver.validateBinding(stage, ent);
if (isValid) {
- ent.newBinding = resolver.resolveBinding(stage, ent.symbol->getName().c_str(), ent.symbol->getType(),
- ent.live);
- ent.newSet = resolver.resolveSet(stage, ent.symbol->getName().c_str(), ent.symbol->getType(), ent.live);
- ent.newLocation = resolver.resolveUniformLocation(stage, ent.symbol->getName().c_str(),
- ent.symbol->getType(), ent.live);
+ resolver.resolveBinding(stage, ent);
+ resolver.resolveSet(stage, ent);
+ resolver.resolveUniformLocation(stage, ent);
if (ent.newBinding != -1) {
if (ent.newBinding >= int(TQualifier::layoutBindingEnd)) {
- TString err = "mapped binding out of range: " + ent.symbol->getName();
+ TString err = "mapped binding out of range: " + entKey.first;
infoSink.info.message(EPrefixInternalError, err.c_str());
error = true;
@@ -272,64 +215,52 @@ struct TResolverUniformAdaptor
}
if (ent.newSet != -1) {
if (ent.newSet >= int(TQualifier::layoutSetEnd)) {
- TString err = "mapped set out of range: " + ent.symbol->getName();
+ TString err = "mapped set out of range: " + entKey.first;
infoSink.info.message(EPrefixInternalError, err.c_str());
error = true;
}
}
} else {
- TString errorMsg = "Invalid binding: " + ent.symbol->getName();
+ TString errorMsg = "Invalid binding: " + entKey.first;
infoSink.info.message(EPrefixInternalError, errorMsg.c_str());
error = true;
}
}
+ inline void setStage(EShLanguage s) { stage = s; }
+
EShLanguage stage;
TIoMapResolver& resolver;
TInfoSink& infoSink;
bool& error;
- TIntermediate& intermediate;
private:
TResolverUniformAdaptor& operator=(TResolverUniformAdaptor&);
};
-struct TResolverInOutAdaptor
-{
- TResolverInOutAdaptor(EShLanguage s, TIoMapResolver& r, TInfoSink& i, bool& e, TIntermediate& interm)
+struct TResolverInOutAdaptor {
+ TResolverInOutAdaptor(EShLanguage s, TIoMapResolver& r, TInfoSink& i, bool& e)
: stage(s)
, resolver(r)
, infoSink(i)
, error(e)
- , intermediate(interm)
{
}
- inline void operator()(TVarEntryInfo& ent)
+ inline void operator()(std::pair<const TString, TVarEntryInfo>& entKey)
{
+ TVarEntryInfo& ent = entKey.second;
ent.newLocation = -1;
ent.newComponent = -1;
ent.newBinding = -1;
ent.newSet = -1;
ent.newIndex = -1;
- const bool isValid = resolver.validateInOut(stage,
- ent.symbol->getName().c_str(),
- ent.symbol->getType(),
- ent.live);
+ const bool isValid = resolver.validateInOut(stage, ent);
if (isValid) {
- ent.newLocation = resolver.resolveInOutLocation(stage,
- ent.symbol->getName().c_str(),
- ent.symbol->getType(),
- ent.live);
- ent.newComponent = resolver.resolveInOutComponent(stage,
- ent.symbol->getName().c_str(),
- ent.symbol->getType(),
- ent.live);
- ent.newIndex = resolver.resolveInOutIndex(stage,
- ent.symbol->getName().c_str(),
- ent.symbol->getType(),
- ent.live);
+ resolver.resolveInOutLocation(stage, ent);
+ resolver.resolveInOutComponent(stage, ent);
+ resolver.resolveInOutIndex(stage, ent);
} else {
TString errorMsg;
if (ent.symbol->getType().getQualifier().semanticName != nullptr) {
@@ -344,218 +275,621 @@ struct TResolverInOutAdaptor
}
}
+ inline void setStage(EShLanguage s) { stage = s; }
+
EShLanguage stage;
TIoMapResolver& resolver;
TInfoSink& infoSink;
bool& error;
- TIntermediate& intermediate;
private:
TResolverInOutAdaptor& operator=(TResolverInOutAdaptor&);
};
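Both adaptors above show the TIoMapResolver interface change that runs through the whole patch: callbacks no longer take the symbol's name, type, and liveness as separate arguments, but receive the whole TVarEntryInfo record and write their results back into it. Side by side, with the old form reconstructed from the removed call sites:

    // Old interface: pieces in, result out.
    ent.newBinding = resolver.resolveBinding(stage, ent.symbol->getName().c_str(),
                                             ent.symbol->getType(), ent.live);

    // New interface: the entry itself is the in/out parameter.
    resolver.resolveBinding(stage, ent); // fills in ent.newBinding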
-// Base class for shared TIoMapResolver services, used by several derivations.
-struct TDefaultIoResolverBase : public glslang::TIoMapResolver
-{
- TDefaultIoResolverBase(const TIntermediate &intermediate) :
- intermediate(intermediate),
- nextUniformLocation(intermediate.getUniformLocationBase()),
- nextInputLocation(0),
- nextOutputLocation(0)
- { }
+// The class is used for reserving explicit uniform locations and ubo/ssbo/opaque bindings
- int getBaseBinding(TResourceType res, unsigned int set) const {
- return selectBaseBinding(intermediate.getShiftBinding(res),
- intermediate.getShiftBindingForSet(res, set));
+struct TSymbolValidater
+{
+ TSymbolValidater(TIoMapResolver& r, TInfoSink& i, TVarLiveMap* in[EShLangCount], TVarLiveMap* out[EShLangCount],
+ TVarLiveMap* uniform[EShLangCount], bool& hadError)
+ : preStage(EShLangCount)
+ , currentStage(EShLangCount)
+ , nextStage(EShLangCount)
+ , resolver(r)
+ , infoSink(i)
+ , hadError(hadError)
+ {
+ memcpy(inVarMaps, in, EShLangCount * (sizeof(TVarLiveMap*)));
+ memcpy(outVarMaps, out, EShLangCount * (sizeof(TVarLiveMap*)));
+ memcpy(uniformVarMap, uniform, EShLangCount * (sizeof(TVarLiveMap*)));
}
- const std::vector<std::string>& getResourceSetBinding() const { return intermediate.getResourceSetBinding(); }
-
- bool doAutoBindingMapping() const { return intermediate.getAutoMapBindings(); }
- bool doAutoLocationMapping() const { return intermediate.getAutoMapLocations(); }
+ inline void operator()(std::pair<const TString, TVarEntryInfo>& entKey) {
+ TVarEntryInfo& ent1 = entKey.second;
+ TIntermSymbol* base = ent1.symbol;
+ const TType& type = ent1.symbol->getType();
+ const TString& name = entKey.first;
+ TString mangleName1, mangleName2;
+ type.appendMangledName(mangleName1);
+ EShLanguage stage = ent1.stage;
+ if (currentStage != stage) {
+ preStage = currentStage;
+ currentStage = stage;
+ nextStage = EShLangCount;
+ for (int i = currentStage + 1; i < EShLangCount; i++) {
+ if (inVarMaps[i] != nullptr)
+ nextStage = static_cast<EShLanguage>(i);
+ }
+ }
+ if (base->getQualifier().storage == EvqVaryingIn) {
+ // validate stage in;
+ if (preStage == EShLangCount)
+ return;
+ if (outVarMaps[preStage] != nullptr) {
+ auto ent2 = outVarMaps[preStage]->find(name);
+ if (ent2 != outVarMaps[preStage]->end()) {
+ ent2->second.symbol->getType().appendMangledName(mangleName2);
+ if (mangleName1 == mangleName2)
+ return;
+ else {
+ TString err = "Invalid In/Out variable type : " + entKey.first;
+ infoSink.info.message(EPrefixInternalError, err.c_str());
+ hadError = true;
+ }
+ }
+ return;
+ }
+ } else if (base->getQualifier().storage == EvqVaryingOut) {
+ // validate stage out;
+ if (nextStage == EShLangCount)
+ return;
+ if (outVarMaps[nextStage] != nullptr) {
+ auto ent2 = inVarMaps[nextStage]->find(name);
+ if (ent2 != inVarMaps[nextStage]->end()) {
+ ent2->second.symbol->getType().appendMangledName(mangleName2);
+ if (mangleName1 == mangleName2)
+ return;
+ else {
+ TString err = "Invalid In/Out variable type : " + entKey.first;
+ infoSink.info.message(EPrefixInternalError, err.c_str());
+ hadError = true;
+ }
+ }
+ return;
+ }
+ } else if (base->getQualifier().isUniformOrBuffer() && ! base->getQualifier().isPushConstant()) {
+ // validate uniform type;
+ for (int i = 0; i < EShLangCount; i++) {
+ if (i != currentStage && outVarMaps[i] != nullptr) {
+ auto ent2 = uniformVarMap[i]->find(name);
+ if (ent2 != uniformVarMap[i]->end()) {
+ ent2->second.symbol->getType().appendMangledName(mangleName2);
+ if (mangleName1 != mangleName2) {
+ TString err = "Invalid Uniform variable type : " + entKey.first;
+ infoSink.info.message(EPrefixInternalError, err.c_str());
+ hadError = true;
+ }
+ mangleName2.clear();
+ }
+ }
+ }
+ }
+ }
+ TVarLiveMap *inVarMaps[EShLangCount], *outVarMaps[EShLangCount], *uniformVarMap[EShLangCount];
+ // Used to mark the previous stage, to get more interface symbol information.
+ EShLanguage preStage, currentStage, nextStage;
+ // Used to mark the current shader stage for the resolver.
+ TIoMapResolver& resolver;
+ TInfoSink& infoSink;
+ bool& hadError;
- typedef std::vector<int> TSlotSet;
- typedef std::unordered_map<int, TSlotSet> TSlotSetMap;
- TSlotSetMap slots;
+private:
+ TSymbolValidater& operator=(TSymbolValidater&);
+};
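TSymbolValidater compares interface variables across stages by mangled type name: a stage output and the same-named input of the following stage must mangle identically, or the map is flagged with the "Invalid In/Out variable type" error seen above. A reduced sketch of the comparison, with vsOut and fsIn as hypothetical symbols for one such pair:

    TString m1, m2;
    vsOut->getType().appendMangledName(m1); // e.g. vertex-stage 'out vec4 a'
    fsIn->getType().appendMangledName(m2);  // fragment-stage 'in vec4 a'
    if (m1 != m2)
        hadError = true; // types disagree across the stage boundary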
- TSlotSet::iterator findSlot(int set, int slot)
- {
- return std::lower_bound(slots[set].begin(), slots[set].end(), slot);
- }
+struct TSlotCollector {
+ TSlotCollector(TIoMapResolver& r, TInfoSink& i) : resolver(r), infoSink(i) { }
- bool checkEmpty(int set, int slot)
- {
- TSlotSet::iterator at = findSlot(set, slot);
- return !(at != slots[set].end() && *at == slot);
+ inline void operator()(std::pair<const TString, TVarEntryInfo>& entKey) {
+ resolver.reserverStorageSlot(entKey.second, infoSink);
+ resolver.reserverResourceSlot(entKey.second, infoSink);
}
+ TIoMapResolver& resolver;
+ TInfoSink& infoSink;
- int reserveSlot(int set, int slot, int size = 1)
- {
- TSlotSet::iterator at = findSlot(set, slot);
+private:
+ TSlotCollector& operator=(TSlotCollector&);
+};
- // tolerate aliasing, by not double-recording aliases
- // (policy about appropriateness of the alias is higher up)
- for (int i = 0; i < size; i++) {
- if (at == slots[set].end() || *at != slot + i)
- at = slots[set].insert(at, slot + i);
- ++at;
- }
+TDefaultIoResolverBase::TDefaultIoResolverBase(const TIntermediate& intermediate)
+ : intermediate(intermediate)
+ , nextUniformLocation(intermediate.getUniformLocationBase())
+ , nextInputLocation(0)
+ , nextOutputLocation(0)
+{
+ memset(stageMask, false, sizeof(bool) * (EShLangCount + 1));
+}
- return slot;
- }
+int TDefaultIoResolverBase::getBaseBinding(TResourceType res, unsigned int set) const {
+ return selectBaseBinding(intermediate.getShiftBinding(res), intermediate.getShiftBindingForSet(res, set));
+}
- int getFreeSlot(int set, int base, int size = 1)
- {
- TSlotSet::iterator at = findSlot(set, base);
- if (at == slots[set].end())
- return reserveSlot(set, base, size);
+const std::vector<std::string>& TDefaultIoResolverBase::getResourceSetBinding() const {
+ return intermediate.getResourceSetBinding();
+}
- // look for a big enough gap
- for (; at != slots[set].end(); ++at) {
- if (*at - base >= size)
- break;
- base = *at + 1;
- }
- return reserveSlot(set, base, size);
- }
+bool TDefaultIoResolverBase::doAutoBindingMapping() const { return intermediate.getAutoMapBindings(); }
- virtual bool validateBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool /*is_live*/) override = 0;
+bool TDefaultIoResolverBase::doAutoLocationMapping() const { return intermediate.getAutoMapLocations(); }
- virtual int resolveBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool is_live) override = 0;
+TDefaultIoResolverBase::TSlotSet::iterator TDefaultIoResolverBase::findSlot(int set, int slot) {
+ return std::lower_bound(slots[set].begin(), slots[set].end(), slot);
+}
- int resolveSet(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool /*is_live*/) override
- {
- if (type.getQualifier().hasSet())
- return type.getQualifier().layoutSet;
+bool TDefaultIoResolverBase::checkEmpty(int set, int slot) {
+ TSlotSet::iterator at = findSlot(set, slot);
+ return ! (at != slots[set].end() && *at == slot);
+}
- // If a command line or API option requested a single descriptor set, use that (if not overrided by spaceN)
- if (getResourceSetBinding().size() == 1)
- return atoi(getResourceSetBinding()[0].c_str());
+int TDefaultIoResolverBase::reserveSlot(int set, int slot, int size) {
+ TSlotSet::iterator at = findSlot(set, slot);
+ // tolerate aliasing, by not double-recording aliases
+ // (policy about appropriateness of the alias is higher up)
+ for (int i = 0; i < size; i++) {
+ if (at == slots[set].end() || *at != slot + i)
+ at = slots[set].insert(at, slot + i);
+ ++at;
+ }
+ return slot;
+}
- return 0;
+int TDefaultIoResolverBase::getFreeSlot(int set, int base, int size) {
+ TSlotSet::iterator at = findSlot(set, base);
+ if (at == slots[set].end())
+ return reserveSlot(set, base, size);
+ // look for a big enough gap
+ for (; at != slots[set].end(); ++at) {
+ if (*at - base >= size)
+ break;
+ base = *at + 1;
}
- int resolveUniformLocation(EShLanguage /*stage*/, const char* name, const glslang::TType& type, bool /*is_live*/) override
- {
- // kick out of not doing this
- if (!doAutoLocationMapping())
- return -1;
+ return reserveSlot(set, base, size);
+}
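getFreeSlot() walks the sorted per-set slot list looking for the first gap of at least size consecutive free slots at or above base, then records the run through reserveSlot(). A small worked trace (hypothetical calls, starting from an empty set 0):

    // getFreeSlot(0, 0, 2): no entries yet -> reserves {0, 1}, returns 0
    // getFreeSlot(0, 0, 1): walks past the {0, 1} run -> reserves {2}, returns 2
    // reserveSlot(0, 1, 1): slot 1 already recorded; aliasing is tolerated,
    //                       nothing is double-recorded, returns 1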
- // no locations added if already present, a built-in variable, a block, or an opaque
- if (type.getQualifier().hasLocation() || type.isBuiltIn() ||
- type.getBasicType() == EbtBlock ||
- type.getBasicType() == EbtAtomicUint ||
- (type.containsOpaque() && intermediate.getSpv().openGl == 0))
- return -1;
+int TDefaultIoResolverBase::resolveSet(EShLanguage /*stage*/, TVarEntryInfo& ent) {
+ const TType& type = ent.symbol->getType();
+ if (type.getQualifier().hasSet()) {
+ return ent.newSet = type.getQualifier().layoutSet;
+ }
+ // If a command line or API option requested a single descriptor set, use that (if not overridden by spaceN)
+ if (getResourceSetBinding().size() == 1) {
+ return ent.newSet = atoi(getResourceSetBinding()[0].c_str());
+ }
+ return ent.newSet = 0;
+}
- // no locations on blocks of built-in variables
- if (type.isStruct()) {
- if (type.getStruct()->size() < 1)
- return -1;
- if ((*type.getStruct())[0].type->isBuiltIn())
- return -1;
+int TDefaultIoResolverBase::resolveUniformLocation(EShLanguage /*stage*/, TVarEntryInfo& ent) {
+ const TType& type = ent.symbol->getType();
+ const char* name = ent.symbol->getName().c_str();
+ // bail out if not doing this mapping
+ if (! doAutoLocationMapping()) {
+ return ent.newLocation = -1;
+ }
+ // no locations added if already present, a built-in variable, a block, or an opaque
+ if (type.getQualifier().hasLocation() || type.isBuiltIn() || type.getBasicType() == EbtBlock ||
+ type.isAtomic() || (type.containsOpaque() && intermediate.getSpv().openGl == 0)) {
+ return ent.newLocation = -1;
+ }
+ // no locations on blocks of built-in variables
+ if (type.isStruct()) {
+ if (type.getStruct()->size() < 1) {
+ return ent.newLocation = -1;
}
+ if ((*type.getStruct())[0].type->isBuiltIn()) {
+ return ent.newLocation = -1;
+ }
+ }
+ int location = intermediate.getUniformLocationOverride(name);
+ if (location != -1) {
+ return ent.newLocation = location;
+ }
+ location = nextUniformLocation;
+ nextUniformLocation += TIntermediate::computeTypeUniformLocationSize(type);
+ return ent.newLocation = location;
+}
- int location = intermediate.getUniformLocationOverride(name);
- if (location != -1)
- return location;
+int TDefaultIoResolverBase::resolveInOutLocation(EShLanguage stage, TVarEntryInfo& ent) {
+ const TType& type = ent.symbol->getType();
+ // bail out if not doing this mapping
+ if (! doAutoLocationMapping()) {
+ return ent.newLocation = -1;
+ }
+
+ // no locations added if already present, or a built-in variable
+ if (type.getQualifier().hasLocation() || type.isBuiltIn()) {
+ return ent.newLocation = -1;
+ }
+
+ // no locations on blocks of built-in variables
+ if (type.isStruct()) {
+ if (type.getStruct()->size() < 1) {
+ return ent.newLocation = -1;
+ }
+ if ((*type.getStruct())[0].type->isBuiltIn()) {
+ return ent.newLocation = -1;
+ }
+ }
+ // point to the right input or output location counter
+ int& nextLocation = type.getQualifier().isPipeInput() ? nextInputLocation : nextOutputLocation;
+ // Placeholder. This does not do proper cross-stage lining up, nor
+ // work with mixed location/no-location declarations.
+ int location = nextLocation;
+ int typeLocationSize;
+ // Don’t take into account the outer-most array if the stage’s
+ // interface is automatically an array.
+ typeLocationSize = computeTypeLocationSize(type, stage);
+ nextLocation += typeLocationSize;
+ return ent.newLocation = location;
+}
- location = nextUniformLocation;
+int TDefaultIoResolverBase::resolveInOutComponent(EShLanguage /*stage*/, TVarEntryInfo& ent) {
+ return ent.newComponent = -1;
+}
- nextUniformLocation += TIntermediate::computeTypeUniformLocationSize(type);
+int TDefaultIoResolverBase::resolveInOutIndex(EShLanguage /*stage*/, TVarEntryInfo& ent) { return ent.newIndex = -1; }
+
+uint32_t TDefaultIoResolverBase::computeTypeLocationSize(const TType& type, EShLanguage stage) {
+ int typeLocationSize;
+ // Don’t take into account the outer-most array if the stage’s
+ // interface is automatically an array.
+ if (type.getQualifier().isArrayedIo(stage)) {
+ TType elementType(type, 0);
+ typeLocationSize = TIntermediate::computeTypeLocationSize(elementType, stage);
+ } else {
+ typeLocationSize = TIntermediate::computeTypeLocationSize(type, stage);
+ }
+ return typeLocationSize;
+}
- return location;
+// TDefaultGlslIoResolver
+TResourceType TDefaultGlslIoResolver::getResourceType(const glslang::TType& type) {
+ if (isImageType(type)) {
+ return EResImage;
}
- bool validateInOut(EShLanguage /*stage*/, const char* /*name*/, const TType& /*type*/, bool /*is_live*/) override
- {
- return true;
+ if (isTextureType(type)) {
+ return EResTexture;
}
- int resolveInOutLocation(EShLanguage stage, const char* /*name*/, const TType& type, bool /*is_live*/) override
- {
- // kick out of not doing this
- if (!doAutoLocationMapping())
- return -1;
+ if (isSsboType(type)) {
+ return EResSsbo;
+ }
+ if (isSamplerType(type)) {
+ return EResSampler;
+ }
+ if (isUboType(type)) {
+ return EResUbo;
+ }
+ return EResCount;
+}
- // no locations added if already present, or a built-in variable
- if (type.getQualifier().hasLocation() || type.isBuiltIn())
- return -1;
+TDefaultGlslIoResolver::TDefaultGlslIoResolver(const TIntermediate& intermediate)
+ : TDefaultIoResolverBase(intermediate)
+ , preStage(EShLangCount)
+ , currentStage(EShLangCount)
+{ }
+
+int TDefaultGlslIoResolver::resolveInOutLocation(EShLanguage stage, TVarEntryInfo& ent) {
+ const TType& type = ent.symbol->getType();
+ const TString& name = ent.symbol->getName();
+ if (currentStage != stage) {
+ preStage = currentStage;
+ currentStage = stage;
+ }
+ // bail out if not doing this mapping
+ if (! doAutoLocationMapping()) {
+ return ent.newLocation = -1;
+ }
+ // expand the location to each element if the symbol is a struct or array
+ if (type.getQualifier().hasLocation()) {
+ return ent.newLocation = type.getQualifier().layoutLocation;
+ }
+ // no locations added if already present, or a built-in variable
+ if (type.isBuiltIn()) {
+ return ent.newLocation = -1;
+ }
+ // no locations on blocks of built-in variables
+ if (type.isStruct()) {
+ if (type.getStruct()->size() < 1) {
+ return ent.newLocation = -1;
+ }
+ if ((*type.getStruct())[0].type->isBuiltIn()) {
+ return ent.newLocation = -1;
+ }
+ }
+ int typeLocationSize = computeTypeLocationSize(type, stage);
+ int location = type.getQualifier().layoutLocation;
+ bool hasLocation = false;
+ EShLanguage keyStage(EShLangCount);
+ TStorageQualifier storage;
+ storage = EvqInOut;
+ if (type.getQualifier().isPipeInput()) {
+ // If this symbol is an input, search the previous stage's outputs
+ keyStage = preStage;
+ }
+ if (type.getQualifier().isPipeOutput()) {
+ // If this symbol is an output, search the next stage's inputs
+ keyStage = currentStage;
+ }
+ // The in/out in the current stage is not declared with a location, but it may be declared
+ // with an explicit location in another stage; look in storageSlotMap first to check whether
+ // the in/out already has a location.
+ int resourceKey = buildStorageKey(keyStage, storage);
+ if (! storageSlotMap[resourceKey].empty()) {
+ TVarSlotMap::iterator iter = storageSlotMap[resourceKey].find(name);
+ if (iter != storageSlotMap[resourceKey].end()) {
+ // If the interface resource is found, mark it as having a location and set this
+ // symbol's new location to the explicit location declared in the previous or next stage.
+ //
+ // vs: out vec4 a;
+ // fs: layout(..., location = 3,...) in vec4 a;
+ hasLocation = true;
+ location = iter->second;
+ // if we want to handle a case like this:
+ // vs: layout(location=4) out vec4 a;
+ // out vec4 b;
+ //
+ // fs: in vec4 a;
+ // layout(location = 4) in vec4 b;
+ // we would need to retraverse the map.
+ }
+ if (! hasLocation) {
+ // If the interface resource is not found, the location was declared implicitly in both
+ // stages, so we should find a new slot for this interface.
+ //
+ // vs: out vec4 a;
+ // fs: in vec4 a;
+ location = getFreeSlot(resourceKey, 0, typeLocationSize);
+ storageSlotMap[resourceKey][name] = location;
+ }
+ } else {
+ // The first interface declared in the program.
+ TVarSlotMap varSlotMap;
+ location = getFreeSlot(resourceKey, 0, typeLocationSize);
+ varSlotMap[name] = location;
+ storageSlotMap[resourceKey] = varSlotMap;
+ }
+ // Update location
+ return ent.newLocation = location;
+}
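Spelled out as shader pairs, the two cross-stage cases this function distinguishes (taken from its comments, not new semantics) are:

    // Explicit on one side only: the implicit side inherits the location.
    //   vs: out vec4 a;
    //   fs: layout(location = 3) in vec4 a;   -> 'a' resolves to 3 in both stages
    // Implicit on both sides: a fresh slot is allocated and recorded under
    // buildStorageKey(keyStage, EvqInOut) so the other stage finds the same one.
    //   vs: out vec4 a;
    //   fs: in vec4 a;                        -> 'a' gets the first free slot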
+int TDefaultGlslIoResolver::resolveUniformLocation(EShLanguage /*stage*/, TVarEntryInfo& ent) {
+ const TType& type = ent.symbol->getType();
+ const TString& name = ent.symbol->getName();
+ // kick out of not doing this
+ if (! doAutoLocationMapping()) {
+ return ent.newLocation = -1;
+ }
+ // expand the location to each element if the symbol is a struct or array
+ if (type.getQualifier().hasLocation() && (type.isStruct() || type.isArray())) {
+ return ent.newLocation = type.getQualifier().layoutLocation;
+ } else {
+ // no locations added if already present, a built-in variable, a block, or an opaque
+ if (type.getQualifier().hasLocation() || type.isBuiltIn() || type.getBasicType() == EbtBlock ||
+ type.isAtomic() || (type.containsOpaque() && intermediate.getSpv().openGl == 0)) {
+ return ent.newLocation = -1;
+ }
// no locations on blocks of built-in variables
if (type.isStruct()) {
- if (type.getStruct()->size() < 1)
- return -1;
- if ((*type.getStruct())[0].type->isBuiltIn())
- return -1;
- }
-
- // point to the right input or output location counter
- int& nextLocation = type.getQualifier().isPipeInput() ? nextInputLocation : nextOutputLocation;
-
- // Placeholder. This does not do proper cross-stage lining up, nor
- // work with mixed location/no-location declarations.
- int location = nextLocation;
- int typeLocationSize;
- // Don’t take into account the outer-most array if the stage’s
- // interface is automatically an array.
- if (type.getQualifier().isArrayedIo(stage)) {
- TType elementType(type, 0);
- typeLocationSize = TIntermediate::computeTypeLocationSize(elementType, stage);
- } else {
- typeLocationSize = TIntermediate::computeTypeLocationSize(type, stage);
+ if (type.getStruct()->size() < 1) {
+ return ent.newLocation = -1;
+ }
+ if ((*type.getStruct())[0].type->isBuiltIn()) {
+ return ent.newLocation = -1;
+ }
}
- nextLocation += typeLocationSize;
-
- return location;
}
- int resolveInOutComponent(EShLanguage /*stage*/, const char* /*name*/, const TType& /*type*/, bool /*is_live*/) override
- {
- return -1;
+ int location = intermediate.getUniformLocationOverride(name.c_str());
+ if (location != -1) {
+ return ent.newLocation = location;
}
- int resolveInOutIndex(EShLanguage /*stage*/, const char* /*name*/, const TType& /*type*/, bool /*is_live*/) override
- {
- return -1;
+
+ int size = TIntermediate::computeTypeUniformLocationSize(type);
+
+ // The uniform in the current stage is not declared with a location, but it may be declared
+ // with an explicit location in another stage; look in storageSlotMap first to check whether
+ // the uniform already has a location.
+ bool hasLocation = false;
+ int resourceKey = buildStorageKey(EShLangCount, EvqUniform);
+ TVarSlotMap& slotMap = storageSlotMap[resourceKey];
+ // Check whether the shader program has any uniform resources
+ if (! slotMap.empty()) {
+ // If the map is not empty, try to find a uniform with the same name
+ TVarSlotMap::iterator iter = slotMap.find(name);
+ if (iter != slotMap.end()) {
+ // If the uniform is found, mark it as having a location and set this symbol's
+ // new location to the uniform's explicit location declared in the other stage.
+ //
+ // vs: uniform vec4 a;
+ // fs: layout(..., location = 3,...) uniform vec4 a;
+ hasLocation = true;
+ location = iter->second;
+ }
+ if (! hasLocation) {
+ // No explicit location declaration in any other stage,
+ // so we should find a new slot for this uniform.
+ //
+ // vs: uniform vec4 a;
+ // fs: uniform vec4 a;
+ location = getFreeSlot(resourceKey, 0, computeTypeLocationSize(type, currentStage));
+ storageSlotMap[resourceKey][name] = location;
+ }
+ } else {
+ // The first uniform declared in the program.
+ TVarSlotMap varSlotMap;
+ location = getFreeSlot(resourceKey, 0, size);
+ varSlotMap[name] = location;
+ storageSlotMap[resourceKey] = varSlotMap;
}
+ return ent.newLocation = location;
+}
- void notifyBinding(EShLanguage, const char* /*name*/, const TType&, bool /*is_live*/) override {}
- void notifyInOut(EShLanguage, const char* /*name*/, const TType&, bool /*is_live*/) override {}
- void endNotifications(EShLanguage) override {}
- void beginNotifications(EShLanguage) override {}
- void beginResolve(EShLanguage) override {}
- void endResolve(EShLanguage) override {}
+int TDefaultGlslIoResolver::resolveBinding(EShLanguage /*stage*/, TVarEntryInfo& ent) {
+ const TType& type = ent.symbol->getType();
+ const TString& name = ent.symbol->getName();
+ // On OpenGL, arrays of opaque types take a separate binding for each element
+ int numBindings = intermediate.getSpv().openGl != 0 && type.isSizedArray() ? type.getCumulativeArraySize() : 1;
+ TResourceType resource = getResourceType(type);
+ // No need to handle plain uniform symbols; they are handled in resolveUniformLocation
+ if (resource == EResUbo && type.getBasicType() != EbtBlock) {
+ return ent.newBinding = -1;
+ }
+ // There is no 'set' qualifier in the OpenGL Shading Language; each resource has its own
+ // binding namespace, so remap 'set' to the resource type, which keeps each resource
+ // binding valid from 0 to MAX_XXRESOURCE_BINDINGS
+ int set = resource;
+ if (resource < EResCount) {
+ if (type.getQualifier().hasBinding()) {
+ ent.newBinding = reserveSlot(set, getBaseBinding(resource, set) + type.getQualifier().layoutBinding, numBindings);
+ return ent.newBinding;
+ } else if (ent.live && doAutoBindingMapping()) {
+            // The resource in the current stage is not declared with a binding, but it may be
+            // declared with an explicit binding in another stage. Check the resourceSlotMap first;
+            // if the resource already has a binding, there is no need to allocate one.
+ bool hasBinding = false;
+ if (! resourceSlotMap[resource].empty()) {
+ TVarSlotMap::iterator iter = resourceSlotMap[resource].find(name);
+ if (iter != resourceSlotMap[resource].end()) {
+ hasBinding = true;
+ ent.newBinding = iter->second;
+ }
+ }
+ if (! hasBinding) {
+ TVarSlotMap varSlotMap;
+                // Find a free slot; the caller made sure all variables with an explicit binding
+                // were passed first, so everything passed now lacks a binding and needs one.
+ int binding = getFreeSlot(resource, getBaseBinding(resource, set), numBindings);
+ varSlotMap[name] = binding;
+ resourceSlotMap[resource] = varSlotMap;
+ ent.newBinding = binding;
+ }
+ return ent.newBinding;
+ }
+ }
+ return ent.newBinding = -1;
+}
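A minimal sketch of the per-resource-type binding namespaces described above, using only the standard library (the integer keys stand in for TResourceType values):

    #include <map>
    #include <set>

    // Each resource type acts as its own 'set', so e.g. a texture and a UBO
    // can both use binding 0 without colliding.
    std::map<int, std::set<int>> usedBindings; // resource type -> used bindings
    bool reserveBinding(int resourceType, int binding) {
        return usedBindings[resourceType].insert(binding).second; // false if taken
    }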
-protected:
- TDefaultIoResolverBase(TDefaultIoResolverBase&);
- TDefaultIoResolverBase& operator=(TDefaultIoResolverBase&);
+void TDefaultGlslIoResolver::beginResolve(EShLanguage stage) {
+ // reset stage state
+ if (stage == EShLangCount)
+ preStage = currentStage = stage;
+ // update stage state
+ else if (currentStage != stage) {
+ preStage = currentStage;
+ currentStage = stage;
+ }
+}
- const TIntermediate &intermediate;
- int nextUniformLocation;
- int nextInputLocation;
- int nextOutputLocation;
+void TDefaultGlslIoResolver::endResolve(EShLanguage /*stage*/) {
+    // nothing to do
+}
- // Return descriptor set specific base if there is one, and the generic base otherwise.
- int selectBaseBinding(int base, int descriptorSetBase) const {
- return descriptorSetBase != -1 ? descriptorSetBase : base;
+void TDefaultGlslIoResolver::beginCollect(EShLanguage stage) {
+ // reset stage state
+ if (stage == EShLangCount)
+ preStage = currentStage = stage;
+ // update stage state
+ else if (currentStage != stage) {
+ preStage = currentStage;
+ currentStage = stage;
}
+}
- static int getLayoutSet(const glslang::TType& type) {
- if (type.getQualifier().hasSet())
- return type.getQualifier().layoutSet;
- else
- return 0;
- }
+void TDefaultGlslIoResolver::endCollect(EShLanguage /*stage*/) {
+    // nothing to do
+}
- static bool isSamplerType(const glslang::TType& type) {
- return type.getBasicType() == glslang::EbtSampler && type.getSampler().isPureSampler();
+void TDefaultGlslIoResolver::reserverStorageSlot(TVarEntryInfo& ent, TInfoSink& infoSink) {
+ const TType& type = ent.symbol->getType();
+ const TString& name = ent.symbol->getName();
+ TStorageQualifier storage = type.getQualifier().storage;
+ EShLanguage stage(EShLangCount);
+ switch (storage) {
+ case EvqUniform:
+ if (type.getBasicType() != EbtBlock && type.getQualifier().hasLocation()) {
+ //
+            // Reserve the slots for uniforms that have an explicit location
+ int storageKey = buildStorageKey(EShLangCount, EvqUniform);
+ int location = type.getQualifier().layoutLocation;
+ TVarSlotMap& varSlotMap = storageSlotMap[storageKey];
+ TVarSlotMap::iterator iter = varSlotMap.find(name);
+ if (iter == varSlotMap.end()) {
+ int numLocations = TIntermediate::computeTypeUniformLocationSize(type);
+ reserveSlot(storageKey, location, numLocations);
+ varSlotMap[name] = location;
+ } else {
+                // Locations are allocated by name for the OpenGL driver, so a uniform used in
+                // different stages must be declared with the same location
+ if (iter->second != location) {
+ TString errorMsg = "Invalid location: " + name;
+ infoSink.info.message(EPrefixInternalError, errorMsg.c_str());
+ }
+ }
+ }
+ break;
+ case EvqVaryingIn:
+ case EvqVaryingOut:
+ //
+        // Reserve the slots for in/out variables that have an explicit location. Inputs use the
+        // previous stage's key and outputs the current stage's, so both ends of an interface share a key.
+ if (type.getQualifier().hasLocation()) {
+ stage = storage == EvqVaryingIn ? preStage : stage;
+ stage = storage == EvqVaryingOut ? currentStage : stage;
+ int storageKey = buildStorageKey(stage, EvqInOut);
+ int location = type.getQualifier().layoutLocation;
+ TVarSlotMap& varSlotMap = storageSlotMap[storageKey];
+ TVarSlotMap::iterator iter = varSlotMap.find(name);
+ if (iter == varSlotMap.end()) {
+ int numLocations = TIntermediate::computeTypeUniformLocationSize(type);
+ reserveSlot(storageKey, location, numLocations);
+ varSlotMap[name] = location;
+ } else {
+                // Locations are allocated by name for the OpenGL driver, so a variable used in
+                // different stages must be declared with the same location
+ if (iter->second != location) {
+ TString errorMsg = "Invalid location: " + name;
+ infoSink.info.message(EPrefixInternalError, errorMsg.c_str());
+ }
+ }
+ }
+ break;
+ default:
+ break;
}
+}
- static bool isTextureType(const glslang::TType& type) {
- return (type.getBasicType() == glslang::EbtSampler &&
- (type.getSampler().isTexture() || type.getSampler().isSubpass()));
+void TDefaultGlslIoResolver::reserverResourceSlot(TVarEntryInfo& ent, TInfoSink& infoSink) {
+ const TType& type = ent.symbol->getType();
+ const TString& name = ent.symbol->getName();
+ int resource = getResourceType(type);
+ if (type.getQualifier().hasBinding()) {
+ TVarSlotMap& varSlotMap = resourceSlotMap[resource];
+ TVarSlotMap::iterator iter = varSlotMap.find(name);
+ int binding = type.getQualifier().layoutBinding;
+ if (iter == varSlotMap.end()) {
+            // Reserve the slots for UBOs, SSBOs and opaque types that have an explicit binding
+ int numBindings = type.isSizedArray() ? type.getCumulativeArraySize() : 1;
+ varSlotMap[name] = binding;
+ reserveSlot(resource, binding, numBindings);
+ } else {
+            // Bindings are allocated by name for the OpenGL driver, so a resource used in
+            // different stages must be declared with the same binding
+ if (iter->second != binding) {
+ TString errorMsg = "Invalid binding: " + name;
+ infoSink.info.message(EPrefixInternalError, errorMsg.c_str());
+ }
+ }
}
+}
- static bool isUboType(const glslang::TType& type) {
- return type.getQualifier().storage == EvqUniform;
- }
-};
+// TDefaultGlslIoResolver end
/*
* Basic implementation of glslang::TIoMapResolver that replaces the
@@ -567,69 +901,51 @@ protected:
/*
* Default resolver
*/
-struct TDefaultIoResolver : public TDefaultIoResolverBase
-{
- TDefaultIoResolver(const TIntermediate &intermediate) : TDefaultIoResolverBase(intermediate) { }
+struct TDefaultIoResolver : public TDefaultIoResolverBase {
+ TDefaultIoResolver(const TIntermediate& intermediate) : TDefaultIoResolverBase(intermediate) { }
- bool validateBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& /*type*/, bool /*is_live*/) override
- {
- return true;
+ bool validateBinding(EShLanguage /*stage*/, TVarEntryInfo& /*ent*/) override { return true; }
+
+ TResourceType getResourceType(const glslang::TType& type) override {
+ if (isImageType(type)) {
+ return EResImage;
+ }
+ if (isTextureType(type)) {
+ return EResTexture;
+ }
+ if (isSsboType(type)) {
+ return EResSsbo;
+ }
+ if (isSamplerType(type)) {
+ return EResSampler;
+ }
+ if (isUboType(type)) {
+ return EResUbo;
+ }
+ return EResCount;
}
- int resolveBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool is_live) override
- {
+ int resolveBinding(EShLanguage /*stage*/, TVarEntryInfo& ent) override {
+ const TType& type = ent.symbol->getType();
const int set = getLayoutSet(type);
     // On OpenGL arrays of opaque types take a separate binding for each element
int numBindings = intermediate.getSpv().openGl != 0 && type.isSizedArray() ? type.getCumulativeArraySize() : 1;
-
- if (type.getQualifier().hasBinding()) {
- if (isImageType(type))
- return reserveSlot(set, getBaseBinding(EResImage, set) + type.getQualifier().layoutBinding, numBindings);
-
- if (isTextureType(type))
- return reserveSlot(set, getBaseBinding(EResTexture, set) + type.getQualifier().layoutBinding, numBindings);
-
- if (isSsboType(type))
- return reserveSlot(set, getBaseBinding(EResSsbo, set) + type.getQualifier().layoutBinding, numBindings);
-
- if (isSamplerType(type))
- return reserveSlot(set, getBaseBinding(EResSampler, set) + type.getQualifier().layoutBinding, numBindings);
-
- if (isUboType(type))
- return reserveSlot(set, getBaseBinding(EResUbo, set) + type.getQualifier().layoutBinding, numBindings);
- } else if (is_live && doAutoBindingMapping()) {
- // find free slot, the caller did make sure it passes all vars with binding
- // first and now all are passed that do not have a binding and needs one
-
- if (isImageType(type))
- return getFreeSlot(set, getBaseBinding(EResImage, set), numBindings);
-
- if (isTextureType(type))
- return getFreeSlot(set, getBaseBinding(EResTexture, set), numBindings);
-
- if (isSsboType(type))
- return getFreeSlot(set, getBaseBinding(EResSsbo, set), numBindings);
-
- if (isSamplerType(type))
- return getFreeSlot(set, getBaseBinding(EResSampler, set), numBindings);
-
- if (isUboType(type))
- return getFreeSlot(set, getBaseBinding(EResUbo, set), numBindings);
+ TResourceType resource = getResourceType(type);
+ if (resource < EResCount) {
+ if (type.getQualifier().hasBinding()) {
+ return ent.newBinding = reserveSlot(
+ set, getBaseBinding(resource, set) + type.getQualifier().layoutBinding, numBindings);
+ } else if (ent.live && doAutoBindingMapping()) {
+            // Find a free slot; the caller made sure all variables with an explicit binding
+            // were passed first, so everything passed now lacks a binding and needs one.
+ return ent.newBinding = getFreeSlot(set, getBaseBinding(resource, set), numBindings);
+ }
}
-
- return -1;
- }
-
-protected:
- static bool isImageType(const glslang::TType& type) {
- return type.getBasicType() == glslang::EbtSampler && type.getSampler().isImage();
- }
-
- static bool isSsboType(const glslang::TType& type) {
- return type.getQualifier().storage == EvqBuffer;
+ return ent.newBinding = -1;
}
};
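The resolve policy above, reduced to a hypothetical sketch: explicit bindings are reserved as given, offset by the per-type base, while live variables without a binding get the first free slot at or above the base:

    #include <set>

    int resolveSlot(std::set<int>& used, int base, int explicitBinding /* -1 if none */) {
        if (explicitBinding >= 0) {
            used.insert(base + explicitBinding); // reserve the requested slot
            return base + explicitBinding;
        }
        int slot = base;
        while (used.count(slot))
            ++slot;                              // first free slot at or above base
        used.insert(slot);
        return slot;
    }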
+#ifdef ENABLE_HLSL
/********************************************************************************
The following IO resolver maps types in HLSL register space, as follows:
@@ -647,7 +963,7 @@ t - for shader resource views (SRV)
BYTEADDRESSBUFFER
BUFFER
TBUFFER
-
+
s - for samplers
SAMPLER
SAMPLER1D
@@ -673,98 +989,69 @@ b - for constant buffer views (CBV)
CBUFFER
CONSTANTBUFFER
********************************************************************************/
-struct TDefaultHlslIoResolver : public TDefaultIoResolverBase
-{
- TDefaultHlslIoResolver(const TIntermediate &intermediate) : TDefaultIoResolverBase(intermediate) { }
-
- bool validateBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& /*type*/, bool /*is_live*/) override
- {
- return true;
- }
-
- int resolveBinding(EShLanguage /*stage*/, const char* /*name*/, const glslang::TType& type, bool is_live) override
- {
- const int set = getLayoutSet(type);
-
- if (type.getQualifier().hasBinding()) {
- if (isUavType(type))
- return reserveSlot(set, getBaseBinding(EResUav, set) + type.getQualifier().layoutBinding);
-
- if (isSrvType(type))
- return reserveSlot(set, getBaseBinding(EResTexture, set) + type.getQualifier().layoutBinding);
+struct TDefaultHlslIoResolver : public TDefaultIoResolverBase {
+ TDefaultHlslIoResolver(const TIntermediate& intermediate) : TDefaultIoResolverBase(intermediate) { }
- if (isSamplerType(type))
- return reserveSlot(set, getBaseBinding(EResSampler, set) + type.getQualifier().layoutBinding);
+ bool validateBinding(EShLanguage /*stage*/, TVarEntryInfo& /*ent*/) override { return true; }
- if (isUboType(type))
- return reserveSlot(set, getBaseBinding(EResUbo, set) + type.getQualifier().layoutBinding);
- } else if (is_live && doAutoBindingMapping()) {
- // find free slot, the caller did make sure it passes all vars with binding
- // first and now all are passed that do not have a binding and needs one
-
- if (isUavType(type))
- return getFreeSlot(set, getBaseBinding(EResUav, set));
-
- if (isSrvType(type))
- return getFreeSlot(set, getBaseBinding(EResTexture, set));
-
- if (isSamplerType(type))
- return getFreeSlot(set, getBaseBinding(EResSampler, set));
-
- if (isUboType(type))
- return getFreeSlot(set, getBaseBinding(EResUbo, set));
+ TResourceType getResourceType(const glslang::TType& type) override {
+ if (isUavType(type)) {
+ return EResUav;
}
-
- return -1;
- }
-
-protected:
- // Return true if this is a SRV (shader resource view) type:
- static bool isSrvType(const glslang::TType& type) {
- return isTextureType(type) || type.getQualifier().storage == EvqBuffer;
+ if (isSrvType(type)) {
+ return EResTexture;
+ }
+ if (isSamplerType(type)) {
+ return EResSampler;
+ }
+ if (isUboType(type)) {
+ return EResUbo;
+ }
+ return EResCount;
}
- // Return true if this is a UAV (unordered access view) type:
- static bool isUavType(const glslang::TType& type) {
- if (type.getQualifier().readonly)
- return false;
-
- return (type.getBasicType() == glslang::EbtSampler && type.getSampler().isImage()) ||
- (type.getQualifier().storage == EvqBuffer);
+ int resolveBinding(EShLanguage /*stage*/, TVarEntryInfo& ent) override {
+ const TType& type = ent.symbol->getType();
+ const int set = getLayoutSet(type);
+ TResourceType resource = getResourceType(type);
+ if (resource < EResCount) {
+ if (type.getQualifier().hasBinding()) {
+ return ent.newBinding = reserveSlot(set, getBaseBinding(resource, set) + type.getQualifier().layoutBinding);
+ } else if (ent.live && doAutoBindingMapping()) {
+                // Find a free slot; the caller made sure all variables with an explicit binding
+                // were passed first, so everything passed now lacks a binding and needs one.
+ return ent.newBinding = getFreeSlot(set, getBaseBinding(resource, set));
+ }
+ }
+ return ent.newBinding = -1;
}
};
-
+#endif
// Map I/O variables to provided offsets, and make bindings for
// unbound but live variables.
//
// Returns false if the input is too malformed to do this.
-bool TIoMapper::addStage(EShLanguage stage, TIntermediate &intermediate, TInfoSink &infoSink, TIoMapResolver *resolver)
-{
- bool somethingToDo = !intermediate.getResourceSetBinding().empty() ||
- intermediate.getAutoMapBindings() ||
- intermediate.getAutoMapLocations();
-
- for (int res = 0; res < EResCount; ++res) {
- somethingToDo = somethingToDo ||
- (intermediate.getShiftBinding(TResourceType(res)) != 0) ||
- intermediate.hasShiftBindingForSet(TResourceType(res));
+bool TIoMapper::addStage(EShLanguage stage, TIntermediate& intermediate, TInfoSink& infoSink, TIoMapResolver* resolver) {
+ bool somethingToDo = ! intermediate.getResourceSetBinding().empty() || intermediate.getAutoMapBindings() ||
+ intermediate.getAutoMapLocations();
+    // Keep checking the remaining conditions only while 'somethingToDo' is still false,
+    // to avoid unnecessary loop iterations once it has become true.
+ for (int res = 0; (res < EResCount && !somethingToDo); ++res) {
+ somethingToDo = somethingToDo || (intermediate.getShiftBinding(TResourceType(res)) != 0) ||
+ intermediate.hasShiftBindingForSet(TResourceType(res));
}
-
- if (!somethingToDo && resolver == nullptr)
+ if (! somethingToDo && resolver == nullptr)
return true;
-
if (intermediate.getNumEntryPoints() != 1 || intermediate.isRecursive())
return false;
-
TIntermNode* root = intermediate.getTreeRoot();
if (root == nullptr)
return false;
-
// if no resolver is provided, use the default resolver with the given shifts and auto map settings
TDefaultIoResolver defaultResolver(intermediate);
+#ifdef ENABLE_HLSL
TDefaultHlslIoResolver defaultHlslResolver(intermediate);
-
if (resolver == nullptr) {
// TODO: use a passed in IO mapper for this
if (intermediate.usingHlslIoMapping())
@@ -772,47 +1059,191 @@ bool TIoMapper::addStage(EShLanguage stage, TIntermediate &intermediate, TInfoSi
else
resolver = &defaultResolver;
}
+ resolver->addStage(stage);
+#else
+ resolver = &defaultResolver;
+#endif
TVarLiveMap inVarMap, outVarMap, uniformVarMap;
+ TVarLiveVector inVector, outVector, uniformVector;
TVarGatherTraverser iter_binding_all(intermediate, true, inVarMap, outVarMap, uniformVarMap);
TVarGatherTraverser iter_binding_live(intermediate, false, inVarMap, outVarMap, uniformVarMap);
-
root->traverse(&iter_binding_all);
iter_binding_live.pushFunction(intermediate.getEntryPointMangledName().c_str());
-
- while (!iter_binding_live.functions.empty()) {
+ while (! iter_binding_live.functions.empty()) {
TIntermNode* function = iter_binding_live.functions.back();
iter_binding_live.functions.pop_back();
function->traverse(&iter_binding_live);
}
-
// sort entries by priority. see TVarEntryInfo::TOrderByPriority for info.
- std::sort(uniformVarMap.begin(), uniformVarMap.end(), TVarEntryInfo::TOrderByPriority());
-
+ std::for_each(inVarMap.begin(), inVarMap.end(),
+ [&inVector](TVarLivePair p) { inVector.push_back(p); });
+ std::sort(inVector.begin(), inVector.end(), [](const TVarLivePair& p1, const TVarLivePair& p2) -> bool {
+ return TVarEntryInfo::TOrderByPriority()(p1.second, p2.second);
+ });
+ std::for_each(outVarMap.begin(), outVarMap.end(),
+ [&outVector](TVarLivePair p) { outVector.push_back(p); });
+ std::sort(outVector.begin(), outVector.end(), [](const TVarLivePair& p1, const TVarLivePair& p2) -> bool {
+ return TVarEntryInfo::TOrderByPriority()(p1.second, p2.second);
+ });
+ std::for_each(uniformVarMap.begin(), uniformVarMap.end(),
+ [&uniformVector](TVarLivePair p) { uniformVector.push_back(p); });
+ std::sort(uniformVector.begin(), uniformVector.end(), [](const TVarLivePair& p1, const TVarLivePair& p2) -> bool {
+ return TVarEntryInfo::TOrderByPriority()(p1.second, p2.second);
+ });
bool hadError = false;
TNotifyInOutAdaptor inOutNotify(stage, *resolver);
TNotifyUniformAdaptor uniformNotify(stage, *resolver);
- TResolverUniformAdaptor uniformResolve(stage, *resolver, infoSink, hadError, intermediate);
- TResolverInOutAdaptor inOutResolve(stage, *resolver, infoSink, hadError, intermediate);
+ TResolverUniformAdaptor uniformResolve(stage, *resolver, infoSink, hadError);
+ TResolverInOutAdaptor inOutResolve(stage, *resolver, infoSink, hadError);
resolver->beginNotifications(stage);
- std::for_each(inVarMap.begin(), inVarMap.end(), inOutNotify);
- std::for_each(outVarMap.begin(), outVarMap.end(), inOutNotify);
- std::for_each(uniformVarMap.begin(), uniformVarMap.end(), uniformNotify);
+ std::for_each(inVector.begin(), inVector.end(), inOutNotify);
+ std::for_each(outVector.begin(), outVector.end(), inOutNotify);
+ std::for_each(uniformVector.begin(), uniformVector.end(), uniformNotify);
resolver->endNotifications(stage);
resolver->beginResolve(stage);
- std::for_each(inVarMap.begin(), inVarMap.end(), inOutResolve);
- std::for_each(outVarMap.begin(), outVarMap.end(), inOutResolve);
- std::for_each(uniformVarMap.begin(), uniformVarMap.end(), uniformResolve);
+ std::for_each(inVector.begin(), inVector.end(), inOutResolve);
+ std::for_each(inVector.begin(), inVector.end(), [&inVarMap](TVarLivePair p) {
+ auto at = inVarMap.find(p.second.symbol->getName());
+ if (at != inVarMap.end())
+ at->second = p.second;
+ });
+ std::for_each(outVector.begin(), outVector.end(), inOutResolve);
+ std::for_each(outVector.begin(), outVector.end(), [&outVarMap](TVarLivePair p) {
+ auto at = outVarMap.find(p.second.symbol->getName());
+ if (at != outVarMap.end())
+ at->second = p.second;
+ });
+ std::for_each(uniformVector.begin(), uniformVector.end(), uniformResolve);
+ std::for_each(uniformVector.begin(), uniformVector.end(), [&uniformVarMap](TVarLivePair p) {
+ auto at = uniformVarMap.find(p.second.symbol->getName());
+ if (at != uniformVarMap.end())
+ at->second = p.second;
+ });
resolver->endResolve(stage);
-
if (!hadError) {
- // sort by id again, so we can use lower bound to find entries
- std::sort(uniformVarMap.begin(), uniformVarMap.end(), TVarEntryInfo::TOrderById());
TVarSetTraverser iter_iomap(intermediate, inVarMap, outVarMap, uniformVarMap);
root->traverse(&iter_iomap);
}
+ return !hadError;
+}
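The copy/sort/resolve/write-back sequence above exists because TVarLiveMap is ordered by name, not by priority. A minimal sketch of the same pattern, with hypothetical names:

    #include <algorithm>
    #include <map>
    #include <string>
    #include <vector>

    // Copy map entries into a vector, sort by priority, process, then write
    // the results back into the map by key.
    void processByPriority(std::map<std::string, int>& byName) {
        std::vector<std::pair<std::string, int>> v(byName.begin(), byName.end());
        std::sort(v.begin(), v.end(),
                  [](const auto& a, const auto& b) { return a.second > b.second; });
        for (auto& p : v)
            p.second += 1;              // stand-in for "resolve this entry"
        for (const auto& p : v)
            byName[p.first] = p.second; // write back by key
    }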
+// Map I/O variables to provided offsets, and make bindings for
+// unbound but live variables.
+//
+// Returns false if the input is too malformed to do this.
+bool TGlslIoMapper::addStage(EShLanguage stage, TIntermediate& intermediate, TInfoSink& infoSink, TIoMapResolver* resolver) {
+ bool somethingToDo = ! intermediate.getResourceSetBinding().empty() || intermediate.getAutoMapBindings() ||
+ intermediate.getAutoMapLocations();
+    // Keep checking the remaining conditions only while 'somethingToDo' is still false,
+    // to avoid unnecessary loop iterations once it has become true.
+ for (int res = 0; (res < EResCount && !somethingToDo); ++res) {
+ somethingToDo = somethingToDo || (intermediate.getShiftBinding(TResourceType(res)) != 0) ||
+ intermediate.hasShiftBindingForSet(TResourceType(res));
+ }
+ if (! somethingToDo && resolver == nullptr) {
+ return true;
+ }
+ if (intermediate.getNumEntryPoints() != 1 || intermediate.isRecursive()) {
+ return false;
+ }
+ TIntermNode* root = intermediate.getTreeRoot();
+ if (root == nullptr) {
+ return false;
+ }
+ // if no resolver is provided, use the default resolver with the given shifts and auto map settings
+ TDefaultGlslIoResolver defaultResolver(intermediate);
+ if (resolver == nullptr) {
+ resolver = &defaultResolver;
+ }
+ resolver->addStage(stage);
+    inVarMaps[stage] = new TVarLiveMap();
+    outVarMaps[stage] = new TVarLiveMap();
+    uniformVarMap[stage] = new TVarLiveMap();
+ TVarGatherTraverser iter_binding_all(intermediate, true, *inVarMaps[stage], *outVarMaps[stage],
+ *uniformVarMap[stage]);
+ TVarGatherTraverser iter_binding_live(intermediate, false, *inVarMaps[stage], *outVarMaps[stage],
+ *uniformVarMap[stage]);
+ root->traverse(&iter_binding_all);
+ iter_binding_live.pushFunction(intermediate.getEntryPointMangledName().c_str());
+ while (! iter_binding_live.functions.empty()) {
+ TIntermNode* function = iter_binding_live.functions.back();
+ iter_binding_live.functions.pop_back();
+ function->traverse(&iter_binding_live);
+ }
+ TNotifyInOutAdaptor inOutNotify(stage, *resolver);
+ TNotifyUniformAdaptor uniformNotify(stage, *resolver);
+    // Resolve the current stage's input symbol locations against the previous stage's outputs
+    // here. Uniform symbols, UBOs, SSBOs and opaque symbols are per-program resources; their
+    // locations and bindings are resolved in doMap().
+ resolver->beginNotifications(stage);
+ std::for_each(inVarMaps[stage]->begin(), inVarMaps[stage]->end(), inOutNotify);
+ std::for_each(outVarMaps[stage]->begin(), outVarMaps[stage]->end(), inOutNotify);
+ std::for_each(uniformVarMap[stage]->begin(), uniformVarMap[stage]->end(), uniformNotify);
+ resolver->endNotifications(stage);
+ TSlotCollector slotCollector(*resolver, infoSink);
+ resolver->beginCollect(stage);
+ std::for_each(inVarMaps[stage]->begin(), inVarMaps[stage]->end(), slotCollector);
+ std::for_each(outVarMaps[stage]->begin(), outVarMaps[stage]->end(), slotCollector);
+ std::for_each(uniformVarMap[stage]->begin(), uniformVarMap[stage]->end(), slotCollector);
+ resolver->endCollect(stage);
+ intermediates[stage] = &intermediate;
return !hadError;
}
+bool TGlslIoMapper::doMap(TIoMapResolver* resolver, TInfoSink& infoSink) {
+ resolver->endResolve(EShLangCount);
+ if (!hadError) {
+        // Resolve uniform locations and ubo/ssbo/opaque bindings across stages
+ TResolverUniformAdaptor uniformResolve(EShLangCount, *resolver, infoSink, hadError);
+ TResolverInOutAdaptor inOutResolve(EShLangCount, *resolver, infoSink, hadError);
+ TSymbolValidater symbolValidater(*resolver, infoSink, inVarMaps, outVarMaps, uniformVarMap, hadError);
+ TVarLiveVector uniformVector;
+ resolver->beginResolve(EShLangCount);
+ for (int stage = EShLangVertex; stage < EShLangCount; stage++) {
+ if (inVarMaps[stage] != nullptr) {
+ inOutResolve.setStage(EShLanguage(stage));
+ std::for_each(inVarMaps[stage]->begin(), inVarMaps[stage]->end(), symbolValidater);
+ std::for_each(inVarMaps[stage]->begin(), inVarMaps[stage]->end(), inOutResolve);
+ std::for_each(outVarMaps[stage]->begin(), outVarMaps[stage]->end(), symbolValidater);
+ std::for_each(outVarMaps[stage]->begin(), outVarMaps[stage]->end(), inOutResolve);
+ }
+ if (uniformVarMap[stage] != nullptr) {
+ uniformResolve.setStage(EShLanguage(stage));
+ // sort entries by priority. see TVarEntryInfo::TOrderByPriority for info.
+ std::for_each(uniformVarMap[stage]->begin(), uniformVarMap[stage]->end(),
+ [&uniformVector](TVarLivePair p) { uniformVector.push_back(p); });
+ }
+ }
+ std::sort(uniformVector.begin(), uniformVector.end(), [](const TVarLivePair& p1, const TVarLivePair& p2) -> bool {
+ return TVarEntryInfo::TOrderByPriority()(p1.second, p2.second);
+ });
+ std::for_each(uniformVector.begin(), uniformVector.end(), symbolValidater);
+ std::for_each(uniformVector.begin(), uniformVector.end(), uniformResolve);
+ std::sort(uniformVector.begin(), uniformVector.end(), [](const TVarLivePair& p1, const TVarLivePair& p2) -> bool {
+ return TVarEntryInfo::TOrderByPriority()(p1.second, p2.second);
+ });
+ resolver->endResolve(EShLangCount);
+ for (size_t stage = 0; stage < EShLangCount; stage++) {
+ if (intermediates[stage] != nullptr) {
+                // Traverse each stage: set the new location on each input/output and uniform
+                // symbol, and the new binding on UBO, SSBO and opaque symbols.
+ TVarLiveMap** pUniformVarMap = uniformVarMap;
+ std::for_each(uniformVector.begin(), uniformVector.end(), [pUniformVarMap, stage](TVarLivePair p) {
+ auto at = pUniformVarMap[stage]->find(p.second.symbol->getName());
+ if (at != pUniformVarMap[stage]->end())
+ at->second = p.second;
+ });
+ TVarSetTraverser iter_iomap(*intermediates[stage], *inVarMaps[stage], *outVarMaps[stage],
+ *uniformVarMap[stage]);
+ intermediates[stage]->getTreeRoot()->traverse(&iter_iomap);
+ }
+ }
+ return !hadError;
+ } else {
+ return false;
+ }
+}
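A hedged sketch of how the two-phase interface above might be driven; which intermediates exist and which one seeds the resolver are assumptions here, and the real call sites live in glslang's linking code:

    // Gather per-stage data with addStage(), then resolve program-wide
    // uniform locations and bindings once with doMap().
    bool mapProgram(glslang::TIntermediate& vs, glslang::TIntermediate& fs, TInfoSink& sink) {
        glslang::TGlslIoMapper ioMapper;
        glslang::TDefaultGlslIoResolver resolver(vs);
        return ioMapper.addStage(EShLangVertex, vs, sink, &resolver)
            && ioMapper.addStage(EShLangFragment, fs, sink, &resolver)
            && ioMapper.doMap(&resolver, sink);
    }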
+
} // end namespace glslang
+
+#endif // GLSLANG_WEB
diff --git a/thirdparty/glslang/glslang/MachineIndependent/iomapper.h b/thirdparty/glslang/glslang/MachineIndependent/iomapper.h
index 5e0d4391cc..684e88d571 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/iomapper.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/iomapper.h
@@ -33,11 +33,15 @@
// POSSIBILITY OF SUCH DAMAGE.
//
+#ifndef GLSLANG_WEB
+
#ifndef _IOMAPPER_INCLUDED
#define _IOMAPPER_INCLUDED
-#include "../Public/ShaderLang.h"
-
+#include <cstdint>
+#include <unordered_map>
+#include <unordered_set>
+#include "LiveTraverser.h"
//
// A reflection database and its interface, consistent with the OpenGL API reflection queries.
//
@@ -47,17 +51,249 @@ class TInfoSink;
namespace glslang {
class TIntermediate;
+struct TVarEntryInfo {
+ int id;
+ TIntermSymbol* symbol;
+ bool live;
+ int newBinding;
+ int newSet;
+ int newLocation;
+ int newComponent;
+ int newIndex;
+ EShLanguage stage;
+ struct TOrderById {
+ inline bool operator()(const TVarEntryInfo& l, const TVarEntryInfo& r) { return l.id < r.id; }
+ };
+
+ struct TOrderByPriority {
+ // ordering:
+ // 1) has both binding and set
+ // 2) has binding but no set
+ // 3) has no binding but set
+ // 4) has no binding and no set
+ inline bool operator()(const TVarEntryInfo& l, const TVarEntryInfo& r) {
+ const TQualifier& lq = l.symbol->getQualifier();
+ const TQualifier& rq = r.symbol->getQualifier();
+
+ // simple rules:
+ // has binding gives 2 points
+ // has set gives 1 point
+ // who has the most points is more important.
+ int lPoints = (lq.hasBinding() ? 2 : 0) + (lq.hasSet() ? 1 : 0);
+ int rPoints = (rq.hasBinding() ? 2 : 0) + (rq.hasSet() ? 1 : 0);
+
+ if (lPoints == rPoints)
+ return l.id < r.id;
+ return lPoints > rPoints;
+ }
+ };
+};
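A minimal sketch of the scoring described in the comment above: a binding is worth 2 points and a set 1 point, higher totals sort first, and ties fall back to id order:

    int priorityPoints(bool hasBinding, bool hasSet) {
        return (hasBinding ? 2 : 0) + (hasSet ? 1 : 0);
    }
    // priorityPoints(true, true)   == 3 -> binding + set, resolved first
    // priorityPoints(false, false) == 0 -> neither, resolved last (ties by id)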
+
+// Base class for shared TIoMapResolver services, used by several derivations.
+struct TDefaultIoResolverBase : public glslang::TIoMapResolver {
+public:
+ TDefaultIoResolverBase(const TIntermediate& intermediate);
+ typedef std::vector<int> TSlotSet;
+ typedef std::unordered_map<int, TSlotSet> TSlotSetMap;
+
+ // grow the reflection stage by stage
+ void notifyBinding(EShLanguage, TVarEntryInfo& /*ent*/) override {}
+ void notifyInOut(EShLanguage, TVarEntryInfo& /*ent*/) override {}
+ void beginNotifications(EShLanguage) override {}
+ void endNotifications(EShLanguage) override {}
+ void beginResolve(EShLanguage) override {}
+ void endResolve(EShLanguage) override {}
+ void beginCollect(EShLanguage) override {}
+ void endCollect(EShLanguage) override {}
+ void reserverResourceSlot(TVarEntryInfo& /*ent*/, TInfoSink& /*infoSink*/) override {}
+ void reserverStorageSlot(TVarEntryInfo& /*ent*/, TInfoSink& /*infoSink*/) override {}
+ int getBaseBinding(TResourceType res, unsigned int set) const;
+ const std::vector<std::string>& getResourceSetBinding() const;
+ virtual TResourceType getResourceType(const glslang::TType& type) = 0;
+ bool doAutoBindingMapping() const;
+ bool doAutoLocationMapping() const;
+ TSlotSet::iterator findSlot(int set, int slot);
+ bool checkEmpty(int set, int slot);
+ bool validateInOut(EShLanguage /*stage*/, TVarEntryInfo& /*ent*/) override { return true; }
+ int reserveSlot(int set, int slot, int size = 1);
+ int getFreeSlot(int set, int base, int size = 1);
+ int resolveSet(EShLanguage /*stage*/, TVarEntryInfo& ent) override;
+ int resolveUniformLocation(EShLanguage /*stage*/, TVarEntryInfo& ent) override;
+ int resolveInOutLocation(EShLanguage stage, TVarEntryInfo& ent) override;
+ int resolveInOutComponent(EShLanguage /*stage*/, TVarEntryInfo& ent) override;
+ int resolveInOutIndex(EShLanguage /*stage*/, TVarEntryInfo& ent) override;
+ void addStage(EShLanguage stage) override {
+ if (stage < EShLangCount)
+ stageMask[stage] = true;
+ }
+ uint32_t computeTypeLocationSize(const TType& type, EShLanguage stage);
+
+ TSlotSetMap slots;
+
+protected:
+ TDefaultIoResolverBase(TDefaultIoResolverBase&);
+ TDefaultIoResolverBase& operator=(TDefaultIoResolverBase&);
+ const TIntermediate& intermediate;
+ int nextUniformLocation;
+ int nextInputLocation;
+ int nextOutputLocation;
+ bool stageMask[EShLangCount + 1];
+ // Return descriptor set specific base if there is one, and the generic base otherwise.
+ int selectBaseBinding(int base, int descriptorSetBase) const {
+ return descriptorSetBase != -1 ? descriptorSetBase : base;
+ }
+
+ static int getLayoutSet(const glslang::TType& type) {
+ if (type.getQualifier().hasSet())
+ return type.getQualifier().layoutSet;
+ else
+ return 0;
+ }
+
+ static bool isSamplerType(const glslang::TType& type) {
+ return type.getBasicType() == glslang::EbtSampler && type.getSampler().isPureSampler();
+ }
+
+ static bool isTextureType(const glslang::TType& type) {
+ return (type.getBasicType() == glslang::EbtSampler &&
+ (type.getSampler().isTexture() || type.getSampler().isSubpass()));
+ }
+
+ static bool isUboType(const glslang::TType& type) {
+ return type.getQualifier().storage == EvqUniform;
+ }
+
+ static bool isImageType(const glslang::TType& type) {
+ return type.getBasicType() == glslang::EbtSampler && type.getSampler().isImage();
+ }
+
+ static bool isSsboType(const glslang::TType& type) {
+ return type.getQualifier().storage == EvqBuffer;
+ }
+
+ // Return true if this is a SRV (shader resource view) type:
+ static bool isSrvType(const glslang::TType& type) {
+ return isTextureType(type) || type.getQualifier().storage == EvqBuffer;
+ }
+
+ // Return true if this is a UAV (unordered access view) type:
+ static bool isUavType(const glslang::TType& type) {
+ if (type.getQualifier().isReadOnly())
+ return false;
+ return (type.getBasicType() == glslang::EbtSampler && type.getSampler().isImage()) ||
+ (type.getQualifier().storage == EvqBuffer);
+ }
+};
+
+// Default I/O resolver for OpenGL
+struct TDefaultGlslIoResolver : public TDefaultIoResolverBase {
+public:
+ typedef std::map<TString, int> TVarSlotMap; // <resourceName, location/binding>
+ typedef std::map<int, TVarSlotMap> TSlotMap; // <resourceKey, TVarSlotMap>
+ TDefaultGlslIoResolver(const TIntermediate& intermediate);
+ bool validateBinding(EShLanguage /*stage*/, TVarEntryInfo& /*ent*/) override { return true; }
+ TResourceType getResourceType(const glslang::TType& type) override;
+ int resolveInOutLocation(EShLanguage stage, TVarEntryInfo& ent) override;
+ int resolveUniformLocation(EShLanguage /*stage*/, TVarEntryInfo& ent) override;
+ int resolveBinding(EShLanguage /*stage*/, TVarEntryInfo& ent) override;
+ void beginResolve(EShLanguage /*stage*/) override;
+ void endResolve(EShLanguage stage) override;
+ void beginCollect(EShLanguage) override;
+ void endCollect(EShLanguage) override;
+ void reserverStorageSlot(TVarEntryInfo& ent, TInfoSink& infoSink) override;
+ void reserverResourceSlot(TVarEntryInfo& ent, TInfoSink& infoSink) override;
+    // In/out symbols and uniform symbols are stored in the same slot map; the storage key is used
+    // to identify each kind of symbol. The key is built from the stage and the storage qualifier,
+    // which identifies the same storage resource used in different stages. If a resource is a
+    // per-program resource and its usage stage does not matter, the same stage can be used to
+    // build the key. Note: both stage and type must be less than 0xffff.
+ int buildStorageKey(EShLanguage stage, TStorageQualifier type) {
+ assert(static_cast<uint32_t>(stage) <= 0x0000ffff && static_cast<uint32_t>(type) <= 0x0000ffff);
+ return (stage << 16) | type;
+ }
+
+protected:
+    // Records the previous stage, used to get more interface symbol information.
+ EShLanguage preStage;
+    // Records the current shader stage for the resolver.
+ EShLanguage currentStage;
+    // Slot map for resource bindings (images, UBOs, SSBOs); shared across the program.
+    TSlotMap resourceSlotMap;
+    // Slot map for storage locations (uniforms and interface symbols); shared across the program.
+    TSlotMap storageSlotMap;
+};
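A worked sketch of the key packing used by buildStorageKey; the numeric enum values below are illustrative, not the real ones:

    #include <cassert>
    #include <cstdint>

    // Stage in the high 16 bits, storage qualifier in the low 16 bits.
    static int packStorageKey(uint32_t stage, uint32_t storage) {
        assert(stage <= 0xffffu && storage <= 0xffffu);
        return static_cast<int>((stage << 16) | storage);
    }
    // packStorageKey(4, 5) == 0x00040005; distinct stages yield distinct keys,
    // while passing EShLangCount as the stage yields one program-wide key.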
+
+typedef std::map<TString, TVarEntryInfo> TVarLiveMap;
+
+// Override operator=: when a vector of pair<const _Kty, _Ty> is sorted
+// with VC++, the sort calls:
+// pair& operator=(const pair<_Other1, _Other2>& _Right)
+// {
+//     first = _Right.first;
+//     second = _Right.second;
+//     return (*this);
+// }
+// which tries to assign to a const member on the left-hand side.
+// Overriding operator= avoids that compiler error.
+// If a future VC++ compiler can handle this situation,
+// this part of the code can be removed.
+struct TVarLivePair : std::pair<const TString, TVarEntryInfo> {
+ TVarLivePair(std::pair<const TString, TVarEntryInfo>& _Right) : pair(_Right.first, _Right.second) {}
+ TVarLivePair& operator=(const TVarLivePair& _Right) {
+ const_cast<TString&>(first) = _Right.first;
+ second = _Right.second;
+ return (*this);
+ }
+};
+typedef std::vector<TVarLivePair> TVarLiveVector;
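A minimal reproduction of the MSVC issue TVarLivePair works around (names are hypothetical):

    #include <algorithm>
    #include <map>
    #include <string>
    #include <vector>

    // A map's value_type is std::pair<const Key, T>; std::sort must assign
    // elements, which fails because 'first' is const. Copying into pairs with
    // a non-const key (or wrapping, as TVarLivePair does) sidesteps the error.
    void sortMapEntries(const std::map<std::string, int>& m) {
        // std::vector<std::pair<const std::string, int>> bad(m.begin(), m.end());
        // std::sort(bad.begin(), bad.end()); // error: cannot assign to const 'first'
        std::vector<std::pair<std::string, int>> ok(m.begin(), m.end());
        std::sort(ok.begin(), ok.end());      // fine: copied keys are assignable
    }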
// I/O mapper
class TIoMapper {
public:
TIoMapper() {}
virtual ~TIoMapper() {}
+ // grow the reflection stage by stage
+    virtual bool addStage(EShLanguage, TIntermediate&, TInfoSink&, TIoMapResolver*);
+    virtual bool doMap(TIoMapResolver*, TInfoSink&) { return true; }
+};
+// I/O mapper for OpenGL
+class TGlslIoMapper : public TIoMapper {
+public:
+ TGlslIoMapper() {
+        memset(inVarMaps, 0, sizeof(TVarLiveMap*) * EShLangCount);
+        memset(outVarMaps, 0, sizeof(TVarLiveMap*) * EShLangCount);
+        memset(uniformVarMap, 0, sizeof(TVarLiveMap*) * EShLangCount);
+        memset(intermediates, 0, sizeof(TIntermediate*) * EShLangCount);
+ }
+ virtual ~TGlslIoMapper() {
+ for (size_t stage = 0; stage < EShLangCount; stage++) {
+ if (inVarMaps[stage] != nullptr) {
+ delete inVarMaps[stage];
+ inVarMaps[stage] = nullptr;
+ }
+ if (outVarMaps[stage] != nullptr) {
+ delete outVarMaps[stage];
+ outVarMaps[stage] = nullptr;
+ }
+ if (uniformVarMap[stage] != nullptr) {
+ delete uniformVarMap[stage];
+ uniformVarMap[stage] = nullptr;
+ }
+ if (intermediates[stage] != nullptr)
+ intermediates[stage] = nullptr;
+ }
+ }
// grow the reflection stage by stage
- bool addStage(EShLanguage, TIntermediate&, TInfoSink&, TIoMapResolver*);
+ bool addStage(EShLanguage, TIntermediate&, TInfoSink&, TIoMapResolver*) override;
+ bool doMap(TIoMapResolver*, TInfoSink&) override;
+ TVarLiveMap *inVarMaps[EShLangCount], *outVarMaps[EShLangCount],
+ *uniformVarMap[EShLangCount];
+ TIntermediate* intermediates[EShLangCount];
+ bool hadError = false;
};
} // end namespace glslang
#endif // _IOMAPPER_INCLUDED
+
+#endif // GLSLANG_WEB
diff --git a/thirdparty/glslang/glslang/MachineIndependent/limits.cpp b/thirdparty/glslang/glslang/MachineIndependent/limits.cpp
index 64d191b472..51d9300341 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/limits.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/limits.cpp
@@ -187,12 +187,14 @@ bool TIndexTraverser::visitAggregate(TVisit /* visit */, TIntermAggregate* node)
//
void TParseContext::constantIndexExpressionCheck(TIntermNode* index)
{
+#ifndef GLSLANG_WEB
TIndexTraverser it(inductiveLoopIds);
index->traverse(&it);
if (it.bad)
error(it.badLoc, "Non-constant-index-expression", "limitations", "");
+#endif
}
} // end namespace glslang
diff --git a/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp b/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp
index f935d4a629..fe51ec93ff 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/linkValidate.cpp
@@ -56,8 +56,10 @@ namespace glslang {
//
void TIntermediate::error(TInfoSink& infoSink, const char* message)
{
+#ifndef GLSLANG_WEB
infoSink.info.prefix(EPrefixError);
infoSink.info << "Linking " << StageName(language) << " stage: " << message << "\n";
+#endif
++numErrors;
}
@@ -65,8 +67,10 @@ void TIntermediate::error(TInfoSink& infoSink, const char* message)
// Link-time warning.
void TIntermediate::warn(TInfoSink& infoSink, const char* message)
{
+#ifndef GLSLANG_WEB
infoSink.info.prefix(EPrefixWarning);
infoSink.info << "Linking " << StageName(language) << " stage: " << message << "\n";
+#endif
}
// TODO: 4.4 offset/align: "Two blocks linked together in the same program with the same block
@@ -78,9 +82,11 @@ void TIntermediate::warn(TInfoSink& infoSink, const char* message)
//
void TIntermediate::merge(TInfoSink& infoSink, TIntermediate& unit)
{
+#ifndef GLSLANG_WEB
mergeCallGraphs(infoSink, unit);
mergeModes(infoSink, unit);
mergeTrees(infoSink, unit);
+#endif
}
void TIntermediate::mergeCallGraphs(TInfoSink& infoSink, TIntermediate& unit)
@@ -98,6 +104,8 @@ void TIntermediate::mergeCallGraphs(TInfoSink& infoSink, TIntermediate& unit)
callGraph.insert(callGraph.end(), unit.callGraph.begin(), unit.callGraph.end());
}
+#ifndef GLSLANG_WEB
+
#define MERGE_MAX(member) member = std::max(member, unit.member)
#define MERGE_TRUE(member) if (unit.member) member = unit.member;
@@ -106,9 +114,9 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
if (language != unit.language)
error(infoSink, "stages must match when linking into a single stage");
- if (source == EShSourceNone)
- source = unit.source;
- if (source != unit.source)
+ if (getSource() == EShSourceNone)
+ setSource(unit.getSource());
+ if (getSource() != unit.getSource())
error(infoSink, "can't link compilation units from different source languages");
if (treeRoot == nullptr) {
@@ -116,7 +124,7 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
version = unit.version;
requestedExtensions = unit.requestedExtensions;
} else {
- if ((profile == EEsProfile) != (unit.profile == EEsProfile))
+ if ((isEsProfile()) != (unit.isEsProfile()))
error(infoSink, "Cannot cross link ES and desktop profiles");
else if (unit.profile == ECompatibilityProfile)
profile = ECompatibilityProfile;
@@ -142,18 +150,13 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
if (vertices == TQualifier::layoutNotSet)
vertices = unit.vertices;
else if (vertices != unit.vertices) {
- if (language == EShLangGeometry
-#ifdef NV_EXTENSIONS
- || language == EShLangMeshNV
-#endif
- )
+ if (language == EShLangGeometry || language == EShLangMeshNV)
error(infoSink, "Contradictory layout max_vertices values");
else if (language == EShLangTessControl)
error(infoSink, "Contradictory layout vertices values");
else
assert(0);
}
-#ifdef NV_EXTENSIONS
if (primitives == TQualifier::layoutNotSet)
primitives = unit.primitives;
else if (primitives != unit.primitives) {
@@ -162,7 +165,6 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
else
assert(0);
}
-#endif
if (inputPrimitive == ElgNone)
inputPrimitive = unit.inputPrimitive;
@@ -190,12 +192,14 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
MERGE_TRUE(pointMode);
for (int i = 0; i < 3; ++i) {
- if (localSize[i] > 1)
+ if (!localSizeNotDefault[i] && unit.localSizeNotDefault[i]) {
localSize[i] = unit.localSize[i];
+ localSizeNotDefault[i] = true;
+ }
else if (localSize[i] != unit.localSize[i])
error(infoSink, "Contradictory local size");
- if (localSizeSpecId[i] != TQualifier::layoutNotSet)
+ if (localSizeSpecId[i] == TQualifier::layoutNotSet)
localSizeSpecId[i] = unit.localSizeSpecId[i];
else if (localSizeSpecId[i] != unit.localSizeSpecId[i])
error(infoSink, "Contradictory local size specialization ids");
@@ -224,21 +228,16 @@ void TIntermediate::mergeModes(TInfoSink& infoSink, TIntermediate& unit)
xfbBuffers[b].implicitStride = std::max(xfbBuffers[b].implicitStride, unit.xfbBuffers[b].implicitStride);
if (unit.xfbBuffers[b].contains64BitType)
xfbBuffers[b].contains64BitType = true;
-#ifdef AMD_EXTENSIONS
if (unit.xfbBuffers[b].contains32BitType)
xfbBuffers[b].contains32BitType = true;
if (unit.xfbBuffers[b].contains16BitType)
xfbBuffers[b].contains16BitType = true;
-#endif
// TODO: 4.4 link: enhanced layouts: compare ranges
}
MERGE_TRUE(multiStream);
-
-#ifdef NV_EXTENSIONS
MERGE_TRUE(layoutOverrideCoverage);
MERGE_TRUE(geoPassthroughEXT);
-#endif
for (unsigned int i = 0; i < unit.shiftBinding.size(); ++i) {
if (unit.shiftBinding[i] > 0)
@@ -287,13 +286,8 @@ void TIntermediate::mergeTrees(TInfoSink& infoSink, TIntermediate& unit)
}
// Getting this far means we have two existing trees to merge...
-#ifdef NV_EXTENSIONS
numShaderRecordNVBlocks += unit.numShaderRecordNVBlocks;
-#endif
-
-#ifdef NV_EXTENSIONS
numTaskNVBlocks += unit.numTaskNVBlocks;
-#endif
// Get the top-level globals of each unit
TIntermSequence& globals = treeRoot->getAsAggregate()->getSequence();
@@ -315,6 +309,8 @@ void TIntermediate::mergeTrees(TInfoSink& infoSink, TIntermediate& unit)
ioAccessed.insert(unit.ioAccessed.begin(), unit.ioAccessed.end());
}
+#endif
+
// Traverser that seeds an ID map with all built-ins, and tracks the
// maximum ID used.
// (It would be nice to put this in a function, but that causes warnings
@@ -502,6 +498,7 @@ void TIntermediate::mergeImplicitArraySizes(TType& type, const TType& unitType)
//
void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& symbol, const TIntermSymbol& unitSymbol, bool crossStage)
{
+#ifndef GLSLANG_WEB
bool writeTypeComparison = false;
// Types have to match
@@ -536,7 +533,7 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
}
// Precise...
- if (! crossStage && symbol.getQualifier().noContraction != unitSymbol.getQualifier().noContraction) {
+ if (! crossStage && symbol.getQualifier().isNoContraction() != unitSymbol.getQualifier().isNoContraction()) {
error(infoSink, "Presence of precise qualifier must match:");
writeTypeComparison = true;
}
@@ -545,9 +542,9 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
if (symbol.getQualifier().centroid != unitSymbol.getQualifier().centroid ||
symbol.getQualifier().smooth != unitSymbol.getQualifier().smooth ||
symbol.getQualifier().flat != unitSymbol.getQualifier().flat ||
- symbol.getQualifier().sample != unitSymbol.getQualifier().sample ||
- symbol.getQualifier().patch != unitSymbol.getQualifier().patch ||
- symbol.getQualifier().nopersp != unitSymbol.getQualifier().nopersp) {
+          symbol.getQualifier().isSample() != unitSymbol.getQualifier().isSample() ||
+ symbol.getQualifier().isPatch() != unitSymbol.getQualifier().isPatch() ||
+ symbol.getQualifier().isNonPerspective() != unitSymbol.getQualifier().isNonPerspective()) {
error(infoSink, "Interpolation and auxiliary storage qualifiers must match:");
writeTypeComparison = true;
}
@@ -595,6 +592,7 @@ void TIntermediate::mergeErrorCheck(TInfoSink& infoSink, const TIntermSymbol& sy
if (writeTypeComparison)
infoSink.info << " " << symbol.getName() << ": \"" << symbol.getType().getCompleteString() << "\" versus \"" <<
unitSymbol.getType().getCompleteString() << "\"\n";
+#endif
}
//
@@ -609,15 +607,12 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
return;
if (numEntryPoints < 1) {
- if (source == EShSourceGlsl)
+ if (getSource() == EShSourceGlsl)
error(infoSink, "Missing entry point: Each stage requires one entry point");
else
warn(infoSink, "Entry point not found");
}
- if (numPushConstants > 1)
- error(infoSink, "Only one push_constant block is allowed per stage");
-
// recursion and missing body checking
checkCallGraphCycles(infoSink);
checkCallGraphBodies(infoSink, keepUncalled);
@@ -625,6 +620,10 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
// overlap/alias/missing I/O, etc.
inOutLocationCheck(infoSink);
+#ifndef GLSLANG_WEB
+ if (getNumPushConstants() > 1)
+ error(infoSink, "Only one push_constant block is allowed per stage");
+
// invocations
if (invocations == TQualifier::layoutNotSet)
invocations = 1;
@@ -642,12 +641,10 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
for (size_t b = 0; b < xfbBuffers.size(); ++b) {
if (xfbBuffers[b].contains64BitType)
RoundToPow2(xfbBuffers[b].implicitStride, 8);
-#ifdef AMD_EXTENSIONS
else if (xfbBuffers[b].contains32BitType)
RoundToPow2(xfbBuffers[b].implicitStride, 4);
else if (xfbBuffers[b].contains16BitType)
RoundToPow2(xfbBuffers[b].implicitStride, 2);
-#endif
// "It is a compile-time or link-time error to have
// any xfb_offset that overflows xfb_stride, whether stated on declarations before or after the xfb_stride, or
@@ -668,16 +665,11 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
error(infoSink, "xfb_stride must be multiple of 8 for buffer holding a double or 64-bit integer:");
infoSink.info.prefix(EPrefixError);
infoSink.info << " xfb_buffer " << (unsigned int)b << ", xfb_stride " << xfbBuffers[b].stride << "\n";
-#ifdef AMD_EXTENSIONS
} else if (xfbBuffers[b].contains32BitType && ! IsMultipleOfPow2(xfbBuffers[b].stride, 4)) {
-#else
- } else if (! IsMultipleOfPow2(xfbBuffers[b].stride, 4)) {
-#endif
error(infoSink, "xfb_stride must be multiple of 4:");
infoSink.info.prefix(EPrefixError);
infoSink.info << " xfb_buffer " << (unsigned int)b << ", xfb_stride " << xfbBuffers[b].stride << "\n";
}
-#ifdef AMD_EXTENSIONS
// "If the buffer is capturing any
// outputs with half-precision or 16-bit integer components, the stride must be a multiple of 2"
else if (xfbBuffers[b].contains16BitType && ! IsMultipleOfPow2(xfbBuffers[b].stride, 2)) {
@@ -686,7 +678,6 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
infoSink.info << " xfb_buffer " << (unsigned int)b << ", xfb_stride " << xfbBuffers[b].stride << "\n";
}
-#endif
// "The resulting stride (implicit or explicit), when divided by 4, must be less than or equal to the
// implementation-dependent constant gl_MaxTransformFeedbackInterleavedComponents."
if (xfbBuffers[b].stride > (unsigned int)(4 * resources.maxTransformFeedbackInterleavedComponents)) {
@@ -704,7 +695,7 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
error(infoSink, "At least one shader must specify an output layout(vertices=...)");
break;
case EShLangTessEvaluation:
- if (source == EShSourceGlsl) {
+ if (getSource() == EShSourceGlsl) {
if (inputPrimitive == ElgNone)
error(infoSink, "At least one shader must specify an input layout primitive");
if (vertexSpacing == EvsNone)
@@ -730,8 +721,6 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
break;
case EShLangCompute:
break;
-
-#ifdef NV_EXTENSIONS
case EShLangRayGenNV:
case EShLangIntersectNV:
case EShLangAnyHitNV:
@@ -764,8 +753,6 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
if (numTaskNVBlocks > 1)
error(infoSink, "Only one taskNV interface block is allowed per shader");
break;
-#endif
-
default:
error(infoSink, "Unknown Stage.");
break;
@@ -787,6 +774,7 @@ void TIntermediate::finalCheck(TInfoSink& infoSink, bool keepUncalled)
} finalLinkTraverser;
treeRoot->traverse(&finalLinkTraverser);
+#endif
}
//
@@ -973,7 +961,7 @@ void TIntermediate::inOutLocationCheck(TInfoSink& infoSink)
}
}
- if (profile == EEsProfile) {
+ if (isEsProfile()) {
if (numFragOut > 1 && fragOutWithNoLocation)
error(infoSink, "when more than one fragment shader output, all must have location qualifiers");
}
@@ -1066,6 +1054,7 @@ int TIntermediate::addUsedLocation(const TQualifier& qualifier, const TType& typ
// So, for the case of dvec3, we need two independent ioRanges.
int collision = -1; // no collision
+#ifndef GLSLANG_WEB
if (size == 2 && type.getBasicType() == EbtDouble && type.getVectorSize() == 3 &&
(qualifier.isPipeInput() || qualifier.isPipeOutput())) {
// Dealing with dvec3 in/out split across two locations.
@@ -1092,7 +1081,9 @@ int TIntermediate::addUsedLocation(const TQualifier& qualifier, const TType& typ
if (collision < 0)
usedIo[set].push_back(range2);
}
- } else {
+ } else
+#endif
+ {
// Not a dvec3 in/out split across two locations, generic path.
// Need a single IO-range block.
@@ -1106,10 +1097,10 @@ int TIntermediate::addUsedLocation(const TQualifier& qualifier, const TType& typ
}
// combine location and component ranges
- TIoRange range(locationRange, componentRange, type.getBasicType(), qualifier.hasIndex() ? qualifier.layoutIndex : 0);
+ TIoRange range(locationRange, componentRange, type.getBasicType(), qualifier.hasIndex() ? qualifier.getIndex() : 0);
// check for collisions, except for vertex inputs on desktop targeting OpenGL
- if (! (profile != EEsProfile && language == EShLangVertex && qualifier.isPipeInput()) || spvVersion.vulkan > 0)
+ if (! (!isEsProfile() && language == EShLangVertex && qualifier.isPipeInput()) || spvVersion.vulkan > 0)
collision = checkLocationRange(set, range, type, typeCollision);
if (collision < 0)
@@ -1187,14 +1178,10 @@ int TIntermediate::computeTypeLocationSize(const TType& type, EShLanguage stage)
// TODO: perf: this can be flattened by using getCumulativeArraySize(), and a deref that discards all arrayness
// TODO: are there valid cases of having an unsized array with a location? If so, running this code too early.
TType elementType(type, 0);
- if (type.isSizedArray()
-#ifdef NV_EXTENSIONS
- && !type.getQualifier().isPerView()
-#endif
- )
+ if (type.isSizedArray() && !type.getQualifier().isPerView())
return type.getOuterArraySize() * computeTypeLocationSize(elementType, stage);
else {
-#ifdef NV_EXTENSIONS
+#ifndef GLSLANG_WEB
// unset perViewNV attributes for arrayed per-view outputs: "perviewNV vec4 v[MAX_VIEWS][3];"
elementType.getQualifier().perViewNV = false;
#endif
@@ -1273,6 +1260,8 @@ int TIntermediate::computeTypeUniformLocationSize(const TType& type)
return 1;
}
+#ifndef GLSLANG_WEB
+
// Accumulate xfb buffer ranges and check for collisions as the accumulation is done.
//
// Returns < 0 if no collision, >= 0 if collision and the value returned is a colliding value.
@@ -1285,11 +1274,7 @@ int TIntermediate::addXfbBufferOffset(const TType& type)
TXfbBuffer& buffer = xfbBuffers[qualifier.layoutXfbBuffer];
// compute the range
-#ifdef AMD_EXTENSIONS
unsigned int size = computeTypeXfbSize(type, buffer.contains64BitType, buffer.contains32BitType, buffer.contains16BitType);
-#else
- unsigned int size = computeTypeXfbSize(type, buffer.contains64BitType);
-#endif
buffer.implicitStride = std::max(buffer.implicitStride, qualifier.layoutXfbOffset + size);
TRange range(qualifier.layoutXfbOffset, qualifier.layoutXfbOffset + size - 1);
@@ -1309,15 +1294,10 @@ int TIntermediate::addXfbBufferOffset(const TType& type)
// Recursively figure out how many bytes of xfb buffer are used by the given type.
// Return the size of type, in bytes.
// Sets contains64BitType to true if the type contains a 64-bit data type.
-#ifdef AMD_EXTENSIONS
// Sets contains32BitType to true if the type contains a 32-bit data type.
// Sets contains16BitType to true if the type contains a 16-bit data type.
// N.B. Caller must set contains64BitType, contains32BitType, and contains16BitType to false before calling.
unsigned int TIntermediate::computeTypeXfbSize(const TType& type, bool& contains64BitType, bool& contains32BitType, bool& contains16BitType) const
-#else
-// N.B. Caller must set contains64BitType to false before calling.
-unsigned int TIntermediate::computeTypeXfbSize(const TType& type, bool& contains64BitType) const
-#endif
{
// "...if applied to an aggregate containing a double or 64-bit integer, the offset must also be a multiple of 8,
// and the space taken in the buffer will be a multiple of 8.
@@ -1330,44 +1310,32 @@ unsigned int TIntermediate::computeTypeXfbSize(const TType& type, bool& contains
// TODO: perf: this can be flattened by using getCumulativeArraySize(), and a deref that discards all arrayness
assert(type.isSizedArray());
TType elementType(type, 0);
-#ifdef AMD_EXTENSIONS
    return type.getOuterArraySize() * computeTypeXfbSize(elementType, contains64BitType, contains32BitType, contains16BitType);
-#else
- return type.getOuterArraySize() * computeTypeXfbSize(elementType, contains64BitType);
-#endif
}
if (type.isStruct()) {
unsigned int size = 0;
bool structContains64BitType = false;
-#ifdef AMD_EXTENSIONS
bool structContains32BitType = false;
bool structContains16BitType = false;
-#endif
for (int member = 0; member < (int)type.getStruct()->size(); ++member) {
TType memberType(type, member);
// "... if applied to
// an aggregate containing a double or 64-bit integer, the offset must also be a multiple of 8,
// and the space taken in the buffer will be a multiple of 8."
bool memberContains64BitType = false;
-#ifdef AMD_EXTENSIONS
bool memberContains32BitType = false;
bool memberContains16BitType = false;
int memberSize = computeTypeXfbSize(memberType, memberContains64BitType, memberContains32BitType, memberContains16BitType);
-#else
- int memberSize = computeTypeXfbSize(memberType, memberContains64BitType);
-#endif
if (memberContains64BitType) {
structContains64BitType = true;
RoundToPow2(size, 8);
-#ifdef AMD_EXTENSIONS
} else if (memberContains32BitType) {
structContains32BitType = true;
RoundToPow2(size, 4);
} else if (memberContains16BitType) {
structContains16BitType = true;
RoundToPow2(size, 2);
-#endif
}
size += memberSize;
}
@@ -1375,14 +1343,12 @@ unsigned int TIntermediate::computeTypeXfbSize(const TType& type, bool& contains
if (structContains64BitType) {
contains64BitType = true;
RoundToPow2(size, 8);
-#ifdef AMD_EXTENSIONS
} else if (structContains32BitType) {
contains32BitType = true;
RoundToPow2(size, 4);
} else if (structContains16BitType) {
contains16BitType = true;
RoundToPow2(size, 2);
-#endif
}
return size;
}
@@ -1402,7 +1368,6 @@ unsigned int TIntermediate::computeTypeXfbSize(const TType& type, bool& contains
if (type.getBasicType() == EbtDouble || type.getBasicType() == EbtInt64 || type.getBasicType() == EbtUint64) {
contains64BitType = true;
return 8 * numComponents;
-#ifdef AMD_EXTENSIONS
} else if (type.getBasicType() == EbtFloat16 || type.getBasicType() == EbtInt16 || type.getBasicType() == EbtUint16) {
contains16BitType = true;
return 2 * numComponents;
@@ -1412,12 +1377,10 @@ unsigned int TIntermediate::computeTypeXfbSize(const TType& type, bool& contains
contains32BitType = true;
return 4 * numComponents;
}
-#else
- } else
- return 4 * numComponents;
-#endif
}
+#endif
+
const int baseAlignmentVec4Std140 = 16;
// Return the size and alignment of a component of the given type.
@@ -1425,6 +1388,10 @@ const int baseAlignmentVec4Std140 = 16;
// Return value is the alignment..
int TIntermediate::getBaseAlignmentScalar(const TType& type, int& size)
{
+#ifdef GLSLANG_WEB
+ size = 4; return 4;
+#endif
+
switch (type.getBasicType()) {
case EbtInt64:
case EbtUint64:
@@ -1741,7 +1708,7 @@ int TIntermediate::getBlockSize(const TType& blockType)
int TIntermediate::computeBufferReferenceTypeSize(const TType& type)
{
- assert(type.getBasicType() == EbtReference);
+ assert(type.isReference());
int size = getBlockSize(*type.getReferentType());
int align = type.getBufferReferenceAlignment();
diff --git a/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h b/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h
index f29c93c08e..683290af74 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/localintermediate.h
@@ -147,23 +147,19 @@ struct TOffsetRange {
TRange offset;
};
+#ifndef GLSLANG_WEB
// Things that need to be tracked per xfb buffer.
struct TXfbBuffer {
-#ifdef AMD_EXTENSIONS
TXfbBuffer() : stride(TQualifier::layoutXfbStrideEnd), implicitStride(0), contains64BitType(false),
contains32BitType(false), contains16BitType(false) { }
-#else
- TXfbBuffer() : stride(TQualifier::layoutXfbStrideEnd), implicitStride(0), contains64BitType(false) { }
-#endif
std::vector<TRange> ranges; // byte offsets that have already been assigned
unsigned int stride;
unsigned int implicitStride;
bool contains64BitType;
-#ifdef AMD_EXTENSIONS
bool contains32BitType;
bool contains16BitType;
-#endif
};
+#endif
// Track a set of strings describing how the module was processed.
// Using the form:
@@ -217,7 +213,6 @@ class TSymbolTable;
class TSymbol;
class TVariable;
-#ifdef NV_EXTENSIONS
//
// Texture and Sampler transformation mode.
//
@@ -226,7 +221,6 @@ enum ComputeDerivativeMode {
LayoutDerivativeGroupQuads, // derivative_group_quadsNV
LayoutDerivativeGroupLinear, // derivative_group_linearNV
};
-#endif
//
// Set of helper functions to help parse and build the tree.
@@ -234,32 +228,36 @@ enum ComputeDerivativeMode {
class TIntermediate {
public:
explicit TIntermediate(EShLanguage l, int v = 0, EProfile p = ENoProfile) :
- implicitThisName("@this"), implicitCounterName("@count"),
- language(l), source(EShSourceNone), profile(p), version(v), treeRoot(0),
+ language(l),
+ profile(p), version(v), treeRoot(0),
numEntryPoints(0), numErrors(0), numPushConstants(0), recursive(false),
+ invertY(false),
+ useStorageBuffer(false),
+ nanMinMaxClamp(false),
+ depthReplacing(false)
+#ifndef GLSLANG_WEB
+ ,
+ implicitThisName("@this"), implicitCounterName("@count"),
+ source(EShSourceNone),
+ useVulkanMemoryModel(false),
invocations(TQualifier::layoutNotSet), vertices(TQualifier::layoutNotSet),
inputPrimitive(ElgNone), outputPrimitive(ElgNone),
pixelCenterInteger(false), originUpperLeft(false),
- vertexSpacing(EvsNone), vertexOrder(EvoNone), pointMode(false), earlyFragmentTests(false),
- postDepthCoverage(false), depthLayout(EldNone), depthReplacing(false),
+ vertexSpacing(EvsNone), vertexOrder(EvoNone), interlockOrdering(EioNone), pointMode(false), earlyFragmentTests(false),
+ postDepthCoverage(false), depthLayout(EldNone),
hlslFunctionality1(false),
blendEquations(0), xfbMode(false), multiStream(false),
-#ifdef NV_EXTENSIONS
layoutOverrideCoverage(false),
geoPassthroughEXT(false),
numShaderRecordNVBlocks(0),
computeDerivativeMode(LayoutDerivativeNone),
primitives(TQualifier::layoutNotSet),
numTaskNVBlocks(0),
-#endif
autoMapBindings(false),
autoMapLocations(false),
- invertY(false),
flattenUniformArrays(false),
useUnknownFormat(false),
hlslOffsets(false),
- useStorageBuffer(false),
- useVulkanMemoryModel(false),
hlslIoMapping(false),
useVariablePointers(false),
textureSamplerTransformMode(EShTexSampTransKeep),
@@ -267,160 +265,22 @@ public:
binaryDoubleOutput(false),
usePhysicalStorageBuffer(false),
uniformLocationBase(0)
+#endif
{
localSize[0] = 1;
localSize[1] = 1;
localSize[2] = 1;
+ localSizeNotDefault[0] = false;
+ localSizeNotDefault[1] = false;
+ localSizeNotDefault[2] = false;
localSizeSpecId[0] = TQualifier::layoutNotSet;
localSizeSpecId[1] = TQualifier::layoutNotSet;
localSizeSpecId[2] = TQualifier::layoutNotSet;
+#ifndef GLSLANG_WEB
xfbBuffers.resize(TQualifier::layoutXfbBufferEnd);
-
shiftBinding.fill(0);
+#endif
}
- void setLimits(const TBuiltInResource& r) { resources = r; }
-
- bool postProcess(TIntermNode*, EShLanguage);
- void output(TInfoSink&, bool tree);
- void removeTree();
-
- void setSource(EShSource s) { source = s; }
- EShSource getSource() const { return source; }
- void setEntryPointName(const char* ep)
- {
- entryPointName = ep;
- processes.addProcess("entry-point");
- processes.addArgument(entryPointName);
- }
- void setEntryPointMangledName(const char* ep) { entryPointMangledName = ep; }
- const std::string& getEntryPointName() const { return entryPointName; }
- const std::string& getEntryPointMangledName() const { return entryPointMangledName; }
-
- void setShiftBinding(TResourceType res, unsigned int shift)
- {
- shiftBinding[res] = shift;
-
- const char* name = getResourceName(res);
- if (name != nullptr)
- processes.addIfNonZero(name, shift);
- }
-
- unsigned int getShiftBinding(TResourceType res) const { return shiftBinding[res]; }
-
- void setShiftBindingForSet(TResourceType res, unsigned int shift, unsigned int set)
- {
- if (shift == 0) // ignore if there's no shift: it's a no-op.
- return;
-
- shiftBindingForSet[res][set] = shift;
-
- const char* name = getResourceName(res);
- if (name != nullptr) {
- processes.addProcess(name);
- processes.addArgument(shift);
- processes.addArgument(set);
- }
- }
-
- int getShiftBindingForSet(TResourceType res, unsigned int set) const
- {
- const auto shift = shiftBindingForSet[res].find(set);
- return shift == shiftBindingForSet[res].end() ? -1 : shift->second;
- }
- bool hasShiftBindingForSet(TResourceType res) const { return !shiftBindingForSet[res].empty(); }
-
- void setResourceSetBinding(const std::vector<std::string>& shift)
- {
- resourceSetBinding = shift;
- if (shift.size() > 0) {
- processes.addProcess("resource-set-binding");
- for (int s = 0; s < (int)shift.size(); ++s)
- processes.addArgument(shift[s]);
- }
- }
- const std::vector<std::string>& getResourceSetBinding() const { return resourceSetBinding; }
- void setAutoMapBindings(bool map)
- {
- autoMapBindings = map;
- if (autoMapBindings)
- processes.addProcess("auto-map-bindings");
- }
- bool getAutoMapBindings() const { return autoMapBindings; }
- void setAutoMapLocations(bool map)
- {
- autoMapLocations = map;
- if (autoMapLocations)
- processes.addProcess("auto-map-locations");
- }
- bool getAutoMapLocations() const { return autoMapLocations; }
- void setInvertY(bool invert)
- {
- invertY = invert;
- if (invertY)
- processes.addProcess("invert-y");
- }
- bool getInvertY() const { return invertY; }
-
- void setFlattenUniformArrays(bool flatten)
- {
- flattenUniformArrays = flatten;
- if (flattenUniformArrays)
- processes.addProcess("flatten-uniform-arrays");
- }
- bool getFlattenUniformArrays() const { return flattenUniformArrays; }
- void setNoStorageFormat(bool b)
- {
- useUnknownFormat = b;
- if (useUnknownFormat)
- processes.addProcess("no-storage-format");
- }
- bool getNoStorageFormat() const { return useUnknownFormat; }
- void setHlslOffsets()
- {
- hlslOffsets = true;
- if (hlslOffsets)
- processes.addProcess("hlsl-offsets");
- }
- bool usingHlslOffsets() const { return hlslOffsets; }
- void setUseStorageBuffer()
- {
- useStorageBuffer = true;
- processes.addProcess("use-storage-buffer");
- }
- bool usingStorageBuffer() const { return useStorageBuffer; }
- void setHlslIoMapping(bool b)
- {
- hlslIoMapping = b;
- if (hlslIoMapping)
- processes.addProcess("hlsl-iomap");
- }
- bool usingHlslIoMapping() { return hlslIoMapping; }
- void setUseVulkanMemoryModel()
- {
- useVulkanMemoryModel = true;
- processes.addProcess("use-vulkan-memory-model");
- }
- bool usingVulkanMemoryModel() const { return useVulkanMemoryModel; }
- void setUsePhysicalStorageBuffer()
- {
- usePhysicalStorageBuffer = true;
- }
- bool usingPhysicalStorageBuffer() const { return usePhysicalStorageBuffer; }
- void setUseVariablePointers()
- {
- useVariablePointers = true;
- processes.addProcess("use-variable-pointers");
- }
- bool usingVariablePointers() const { return useVariablePointers; }
-
- template<class T> T addCounterBufferName(const T& name) const { return name + implicitCounterName; }
- bool hasCounterBufferName(const TString& name) const {
- size_t len = strlen(implicitCounterName);
- return name.size() > len &&
- name.compare(name.size() - len, len, implicitCounterName) == 0;
- }
-
- void setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode) { textureSamplerTransformMode = mode; }
void setVersion(int v) { version = v; }
int getVersion() const { return version; }
@@ -451,6 +311,12 @@ public:
case EShTargetSpv_1_3:
processes.addProcess("target-env spirv1.3");
break;
+ case EShTargetSpv_1_4:
+ processes.addProcess("target-env spirv1.4");
+ break;
+ case EShTargetSpv_1_5:
+ processes.addProcess("target-env spirv1.5");
+ break;
default:
processes.addProcess("target-env spirvUnknown");
break;
@@ -466,6 +332,9 @@ public:
case EShTargetVulkan_1_1:
processes.addProcess("target-env vulkan1.1");
break;
+ case EShTargetVulkan_1_2:
+ processes.addProcess("target-env vulkan1.2");
+ break;
default:
processes.addProcess("target-env vulkanUnknown");
break;
@@ -484,9 +353,35 @@ public:
int getNumEntryPoints() const { return numEntryPoints; }
int getNumErrors() const { return numErrors; }
void addPushConstantCount() { ++numPushConstants; }
-#ifdef NV_EXTENSIONS
- void addShaderRecordNVCount() { ++numShaderRecordNVBlocks; }
- void addTaskNVCount() { ++numTaskNVBlocks; }
+ void setLimits(const TBuiltInResource& r) { resources = r; }
+
+ bool postProcess(TIntermNode*, EShLanguage);
+ void removeTree();
+
+ void setEntryPointName(const char* ep)
+ {
+ entryPointName = ep;
+ processes.addProcess("entry-point");
+ processes.addArgument(entryPointName);
+ }
+ void setEntryPointMangledName(const char* ep) { entryPointMangledName = ep; }
+ const std::string& getEntryPointName() const { return entryPointName; }
+ const std::string& getEntryPointMangledName() const { return entryPointMangledName; }
+
+ void setInvertY(bool invert)
+ {
+ invertY = invert;
+ if (invertY)
+ processes.addProcess("invert-y");
+ }
+ bool getInvertY() const { return invertY; }
+
+#ifdef ENABLE_HLSL
+ void setSource(EShSource s) { source = s; }
+ EShSource getSource() const { return source; }
+#else
+ void setSource(EShSource s) { assert(s == EShSourceGlsl); }
+ EShSource getSource() const { return EShSourceGlsl; }
#endif
bool isRecursive() const { return recursive; }
@@ -565,6 +460,169 @@ public:
void addSymbolLinkageNodes(TIntermAggregate*& linkage, EShLanguage, TSymbolTable&);
void addSymbolLinkageNode(TIntermAggregate*& linkage, const TSymbol&);
+ void setUseStorageBuffer()
+ {
+ useStorageBuffer = true;
+ processes.addProcess("use-storage-buffer");
+ }
+ bool usingStorageBuffer() const { return useStorageBuffer; }
+ void setDepthReplacing() { depthReplacing = true; }
+ bool isDepthReplacing() const { return depthReplacing; }
+ bool setLocalSize(int dim, int size)
+ {
+ if (localSizeNotDefault[dim])
+ return size == localSize[dim];
+ localSizeNotDefault[dim] = true;
+ localSize[dim] = size;
+ return true;
+ }
+ unsigned int getLocalSize(int dim) const { return localSize[dim]; }
+ bool setLocalSizeSpecId(int dim, int id)
+ {
+ if (localSizeSpecId[dim] != TQualifier::layoutNotSet)
+ return id == localSizeSpecId[dim];
+ localSizeSpecId[dim] = id;
+ return true;
+ }
+ int getLocalSizeSpecId(int dim) const { return localSizeSpecId[dim]; }
+#ifdef GLSLANG_WEB
+ void output(TInfoSink&, bool tree) { }
+
+ bool isEsProfile() const { return false; }
+ bool getXfbMode() const { return false; }
+ bool isMultiStream() const { return false; }
+ TLayoutGeometry getOutputPrimitive() const { return ElgNone; }
+ bool getPostDepthCoverage() const { return false; }
+ bool getEarlyFragmentTests() const { return false; }
+ TLayoutDepth getDepth() const { return EldNone; }
+ bool getPixelCenterInteger() const { return false; }
+ void setOriginUpperLeft() { }
+ bool getOriginUpperLeft() const { return true; }
+ TInterlockOrdering getInterlockOrdering() const { return EioNone; }
+
+ bool getAutoMapBindings() const { return false; }
+ bool getAutoMapLocations() const { return false; }
+ int getNumPushConstants() const { return 0; }
+ void addShaderRecordNVCount() { }
+ void addTaskNVCount() { }
+ void setUseVulkanMemoryModel() { }
+ bool usingVulkanMemoryModel() const { return false; }
+ bool usingPhysicalStorageBuffer() const { return false; }
+ bool usingVariablePointers() const { return false; }
+ unsigned getXfbStride(int buffer) const { return 0; }
+ bool hasLayoutDerivativeModeNone() const { return false; }
+ ComputeDerivativeMode getLayoutDerivativeModeNone() const { return LayoutDerivativeNone; }
+#else
+ void output(TInfoSink&, bool tree);
+
+ bool isEsProfile() const { return profile == EEsProfile; }
+
+ void setShiftBinding(TResourceType res, unsigned int shift)
+ {
+ shiftBinding[res] = shift;
+
+ const char* name = getResourceName(res);
+ if (name != nullptr)
+ processes.addIfNonZero(name, shift);
+ }
+
+ unsigned int getShiftBinding(TResourceType res) const { return shiftBinding[res]; }
+
+ void setShiftBindingForSet(TResourceType res, unsigned int shift, unsigned int set)
+ {
+ if (shift == 0) // ignore if there's no shift: it's a no-op.
+ return;
+
+ shiftBindingForSet[res][set] = shift;
+
+ const char* name = getResourceName(res);
+ if (name != nullptr) {
+ processes.addProcess(name);
+ processes.addArgument(shift);
+ processes.addArgument(set);
+ }
+ }
+
+ int getShiftBindingForSet(TResourceType res, unsigned int set) const
+ {
+ const auto shift = shiftBindingForSet[res].find(set);
+ return shift == shiftBindingForSet[res].end() ? -1 : shift->second;
+ }
+ bool hasShiftBindingForSet(TResourceType res) const { return !shiftBindingForSet[res].empty(); }
+
+ void setResourceSetBinding(const std::vector<std::string>& shift)
+ {
+ resourceSetBinding = shift;
+ if (shift.size() > 0) {
+ processes.addProcess("resource-set-binding");
+ for (int s = 0; s < (int)shift.size(); ++s)
+ processes.addArgument(shift[s]);
+ }
+ }
+ const std::vector<std::string>& getResourceSetBinding() const { return resourceSetBinding; }
+ void setAutoMapBindings(bool map)
+ {
+ autoMapBindings = map;
+ if (autoMapBindings)
+ processes.addProcess("auto-map-bindings");
+ }
+ bool getAutoMapBindings() const { return autoMapBindings; }
+ void setAutoMapLocations(bool map)
+ {
+ autoMapLocations = map;
+ if (autoMapLocations)
+ processes.addProcess("auto-map-locations");
+ }
+ bool getAutoMapLocations() const { return autoMapLocations; }
+
+#ifdef ENABLE_HLSL
+ void setFlattenUniformArrays(bool flatten)
+ {
+ flattenUniformArrays = flatten;
+ if (flattenUniformArrays)
+ processes.addProcess("flatten-uniform-arrays");
+ }
+ bool getFlattenUniformArrays() const { return flattenUniformArrays; }
+#endif
+ void setNoStorageFormat(bool b)
+ {
+ useUnknownFormat = b;
+ if (useUnknownFormat)
+ processes.addProcess("no-storage-format");
+ }
+ bool getNoStorageFormat() const { return useUnknownFormat; }
+ void setUseVulkanMemoryModel()
+ {
+ useVulkanMemoryModel = true;
+ processes.addProcess("use-vulkan-memory-model");
+ }
+ bool usingVulkanMemoryModel() const { return useVulkanMemoryModel; }
+ void setUsePhysicalStorageBuffer()
+ {
+ usePhysicalStorageBuffer = true;
+ }
+ bool usingPhysicalStorageBuffer() const { return usePhysicalStorageBuffer; }
+ void setUseVariablePointers()
+ {
+ useVariablePointers = true;
+ processes.addProcess("use-variable-pointers");
+ }
+ bool usingVariablePointers() const { return useVariablePointers; }
+
+#ifdef ENABLE_HLSL
+ template<class T> T addCounterBufferName(const T& name) const { return name + implicitCounterName; }
+ bool hasCounterBufferName(const TString& name) const {
+ size_t len = strlen(implicitCounterName);
+ return name.size() > len &&
+ name.compare(name.size() - len, len, implicitCounterName) == 0;
+ }
+#endif
+
+ void setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode) { textureSamplerTransformMode = mode; }
+ int getNumPushConstants() const { return numPushConstants; }
+ void addShaderRecordNVCount() { ++numShaderRecordNVBlocks; }
+ void addTaskNVCount() { ++numTaskNVBlocks; }
+
bool setInvocations(int i)
{
if (invocations != TQualifier::layoutNotSet)
@@ -608,23 +666,14 @@ public:
void setPointMode() { pointMode = true; }
bool getPointMode() const { return pointMode; }
- bool setLocalSize(int dim, int size)
- {
- if (localSize[dim] > 1)
- return size == localSize[dim];
- localSize[dim] = size;
- return true;
- }
- unsigned int getLocalSize(int dim) const { return localSize[dim]; }
-
- bool setLocalSizeSpecId(int dim, int id)
+ bool setInterlockOrdering(TInterlockOrdering o)
{
- if (localSizeSpecId[dim] != TQualifier::layoutNotSet)
- return id == localSizeSpecId[dim];
- localSizeSpecId[dim] = id;
+ if (interlockOrdering != EioNone)
+ return interlockOrdering == o;
+ interlockOrdering = o;
return true;
}
- int getLocalSizeSpecId(int dim) const { return localSizeSpecId[dim]; }
+ TInterlockOrdering getInterlockOrdering() const { return interlockOrdering; }
void setXfbMode() { xfbMode = true; }
bool getXfbMode() const { return xfbMode; }
@@ -638,14 +687,10 @@ public:
return true;
}
TLayoutGeometry getOutputPrimitive() const { return outputPrimitive; }
- void setOriginUpperLeft() { originUpperLeft = true; }
- bool getOriginUpperLeft() const { return originUpperLeft; }
- void setPixelCenterInteger() { pixelCenterInteger = true; }
- bool getPixelCenterInteger() const { return pixelCenterInteger; }
- void setEarlyFragmentTests() { earlyFragmentTests = true; }
- bool getEarlyFragmentTests() const { return earlyFragmentTests; }
void setPostDepthCoverage() { postDepthCoverage = true; }
bool getPostDepthCoverage() const { return postDepthCoverage; }
+ void setEarlyFragmentTests() { earlyFragmentTests = true; }
+ bool getEarlyFragmentTests() const { return earlyFragmentTests; }
bool setDepth(TLayoutDepth d)
{
if (depthLayout != EldNone)
@@ -654,29 +699,12 @@ public:
return true;
}
TLayoutDepth getDepth() const { return depthLayout; }
- void setDepthReplacing() { depthReplacing = true; }
- bool isDepthReplacing() const { return depthReplacing; }
-
- void setHlslFunctionality1() { hlslFunctionality1 = true; }
- bool getHlslFunctionality1() const { return hlslFunctionality1; }
-
+ void setOriginUpperLeft() { originUpperLeft = true; }
+ bool getOriginUpperLeft() const { return originUpperLeft; }
+ void setPixelCenterInteger() { pixelCenterInteger = true; }
+ bool getPixelCenterInteger() const { return pixelCenterInteger; }
void addBlendEquation(TBlendEquationShift b) { blendEquations |= (1 << b); }
unsigned int getBlendEquations() const { return blendEquations; }
-
- void addToCallGraph(TInfoSink&, const TString& caller, const TString& callee);
- void merge(TInfoSink&, TIntermediate&);
- void finalCheck(TInfoSink&, bool keepUncalled);
-
- void addIoAccessed(const TString& name) { ioAccessed.insert(name); }
- bool inIoAccessed(const TString& name) const { return ioAccessed.find(name) != ioAccessed.end(); }
-
- int addUsedLocation(const TQualifier&, const TType&, bool& typeCollision);
- int checkLocationRange(int set, const TIoRange& range, const TType&, bool& typeCollision);
- int addUsedOffsets(int binding, int offset, int numOffsets);
- bool addUsedConstantId(int id);
- static int computeTypeLocationSize(const TType&, EShLanguage);
- static int computeTypeUniformLocationSize(const TType&);
-
bool setXfbBufferStride(int buffer, unsigned stride)
{
if (xfbBuffers[buffer].stride != TQualifier::layoutXfbStrideEnd)
@@ -686,28 +714,14 @@ public:
}
unsigned getXfbStride(int buffer) const { return xfbBuffers[buffer].stride; }
int addXfbBufferOffset(const TType&);
-#ifdef AMD_EXTENSIONS
unsigned int computeTypeXfbSize(const TType&, bool& contains64BitType, bool& contains32BitType, bool& contains16BitType) const;
-#else
- unsigned int computeTypeXfbSize(const TType&, bool& contains64BitType) const;
-#endif
- static int getBaseAlignmentScalar(const TType&, int& size);
- static int getBaseAlignment(const TType&, int& size, int& stride, TLayoutPacking layoutPacking, bool rowMajor);
- static int getScalarAlignment(const TType&, int& size, int& stride, bool rowMajor);
- static int getMemberAlignment(const TType&, int& size, int& stride, TLayoutPacking layoutPacking, bool rowMajor);
- static bool improperStraddle(const TType& type, int size, int offset);
- static void updateOffset(const TType& parentType, const TType& memberType, int& offset, int& memberSize);
- static int getOffset(const TType& type, int index);
- static int getBlockSize(const TType& blockType);
- static int computeBufferReferenceTypeSize(const TType&);
- bool promote(TIntermOperator*);
-
-#ifdef NV_EXTENSIONS
void setLayoutOverrideCoverage() { layoutOverrideCoverage = true; }
bool getLayoutOverrideCoverage() const { return layoutOverrideCoverage; }
void setGeoPassthroughEXT() { geoPassthroughEXT = true; }
bool getGeoPassthroughEXT() const { return geoPassthroughEXT; }
void setLayoutDerivativeMode(ComputeDerivativeMode mode) { computeDerivativeMode = mode; }
+ bool hasLayoutDerivativeModeNone() const { return computeDerivativeMode != LayoutDerivativeNone; }
ComputeDerivativeMode getLayoutDerivativeModeNone() const { return computeDerivativeMode; }
bool setPrimitives(int m)
{
@@ -717,28 +731,10 @@ public:
return true;
}
int getPrimitives() const { return primitives; }
-#endif
-
const char* addSemanticName(const TString& name)
{
return semanticNameSet.insert(name).first->c_str();
}
-
- void setSourceFile(const char* file) { if (file != nullptr) sourceFile = file; }
- const std::string& getSourceFile() const { return sourceFile; }
- void addSourceText(const char* text, size_t len) { sourceText.append(text, len); }
- const std::string& getSourceText() const { return sourceText; }
- const std::map<std::string, std::string>& getIncludeText() const { return includeText; }
- void addIncludeText(const char* name, const char* text, size_t len) { includeText[name].assign(text,len); }
- void addProcesses(const std::vector<std::string>& p)
- {
- for (int i = 0; i < (int)p.size(); ++i)
- processes.addProcess(p[i]);
- }
- void addProcess(const std::string& process) { processes.addProcess(process); }
- void addProcessArgument(const std::string& arg) { processes.addArgument(arg); }
- const std::vector<std::string>& getProcesses() const { return processes.getProcesses(); }
-
void addUniformLocationOverride(const char* nameStr, int location)
{
std::string name = nameStr;
@@ -763,9 +759,98 @@ public:
void setBinaryDoubleOutput() { binaryDoubleOutput = true; }
bool getBinaryDoubleOutput() { return binaryDoubleOutput; }
+#endif // GLSLANG_WEB
- const char* const implicitThisName;
- const char* const implicitCounterName;
+#ifdef ENABLE_HLSL
+ void setHlslFunctionality1() { hlslFunctionality1 = true; }
+ bool getHlslFunctionality1() const { return hlslFunctionality1; }
+ void setHlslOffsets()
+ {
+ hlslOffsets = true;
+ if (hlslOffsets)
+ processes.addProcess("hlsl-offsets");
+ }
+ bool usingHlslOffsets() const { return hlslOffsets; }
+ void setHlslIoMapping(bool b)
+ {
+ hlslIoMapping = b;
+ if (hlslIoMapping)
+ processes.addProcess("hlsl-iomap");
+ }
+ bool usingHlslIoMapping() { return hlslIoMapping; }
+#else
+ bool getHlslFunctionality1() const { return false; }
+ bool usingHlslOffsets() const { return false; }
+ bool usingHlslIoMapping() { return false; }
+#endif
+
+ void addToCallGraph(TInfoSink&, const TString& caller, const TString& callee);
+ void merge(TInfoSink&, TIntermediate&);
+ void finalCheck(TInfoSink&, bool keepUncalled);
+
+ bool buildConvertOp(TBasicType dst, TBasicType src, TOperator& convertOp) const;
+ TIntermTyped* createConversion(TBasicType convertTo, TIntermTyped* node) const;
+
+ void addIoAccessed(const TString& name) { ioAccessed.insert(name); }
+ bool inIoAccessed(const TString& name) const { return ioAccessed.find(name) != ioAccessed.end(); }
+
+ int addUsedLocation(const TQualifier&, const TType&, bool& typeCollision);
+ int checkLocationRange(int set, const TIoRange& range, const TType&, bool& typeCollision);
+ int addUsedOffsets(int binding, int offset, int numOffsets);
+ bool addUsedConstantId(int id);
+ static int computeTypeLocationSize(const TType&, EShLanguage);
+ static int computeTypeUniformLocationSize(const TType&);
+
+ static int getBaseAlignmentScalar(const TType&, int& size);
+ static int getBaseAlignment(const TType&, int& size, int& stride, TLayoutPacking layoutPacking, bool rowMajor);
+ static int getScalarAlignment(const TType&, int& size, int& stride, bool rowMajor);
+ static int getMemberAlignment(const TType&, int& size, int& stride, TLayoutPacking layoutPacking, bool rowMajor);
+ static bool improperStraddle(const TType& type, int size, int offset);
+ static void updateOffset(const TType& parentType, const TType& memberType, int& offset, int& memberSize);
+ static int getOffset(const TType& type, int index);
+ static int getBlockSize(const TType& blockType);
+ static int computeBufferReferenceTypeSize(const TType&);
+ bool promote(TIntermOperator*);
+ void setNanMinMaxClamp(bool setting) { nanMinMaxClamp = setting; }
+ bool getNanMinMaxClamp() const { return nanMinMaxClamp; }
+
+ void setSourceFile(const char* file) { if (file != nullptr) sourceFile = file; }
+ const std::string& getSourceFile() const { return sourceFile; }
+ void addSourceText(const char* text, size_t len) { sourceText.append(text, len); }
+ const std::string& getSourceText() const { return sourceText; }
+ const std::map<std::string, std::string>& getIncludeText() const { return includeText; }
+ void addIncludeText(const char* name, const char* text, size_t len) { includeText[name].assign(text,len); }
+ void addProcesses(const std::vector<std::string>& p)
+ {
+ for (int i = 0; i < (int)p.size(); ++i)
+ processes.addProcess(p[i]);
+ }
+ void addProcess(const std::string& process) { processes.addProcess(process); }
+ void addProcessArgument(const std::string& arg) { processes.addArgument(arg); }
+ const std::vector<std::string>& getProcesses() const { return processes.getProcesses(); }
+
+ // Certain explicit conversions are allowed conditionally
+#ifdef GLSLANG_WEB
+ bool getArithemeticInt8Enabled() const { return false; }
+ bool getArithemeticInt16Enabled() const { return false; }
+ bool getArithemeticFloat16Enabled() const { return false; }
+#else
+ bool getArithemeticInt8Enabled() const {
+ return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int8);
+ }
+ bool getArithemeticInt16Enabled() const {
+ return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ extensionRequested(E_GL_AMD_gpu_shader_int16) ||
+ extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_int16);
+ }
+
+ bool getArithemeticFloat16Enabled() const {
+ return extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types) ||
+ extensionRequested(E_GL_AMD_gpu_shader_half_float) ||
+ extensionRequested(E_GL_EXT_shader_explicit_arithmetic_types_float16);
+ }
+#endif
protected:
TIntermSymbol* addSymbol(int Id, const TString&, const TType&, const TConstUnionArray&, TIntermTyped* subtree, const TSourceLoc&);
@@ -796,13 +881,21 @@ protected:
bool specConstantPropagates(const TIntermTyped&, const TIntermTyped&);
void performTextureUpgradeAndSamplerRemovalTransformation(TIntermNode* root);
bool isConversionAllowed(TOperator op, TIntermTyped* node) const;
- TIntermTyped* createConversion(TBasicType convertTo, TIntermTyped* node) const;
std::tuple<TBasicType, TBasicType> getConversionDestinatonType(TBasicType type0, TBasicType type1, TOperator op) const;
+
+ // JohnK: I think this function should go away.
+ // This data structure is just a log to pass on to back ends.
+ // Versioning and extensions are handled in Version.cpp, with a rich
+ // set of functions for querying stages, versions, extension enable/disabled, etc.
+#ifdef GLSLANG_WEB
+ bool extensionRequested(const char *extension) const { return false; }
+#else
bool extensionRequested(const char *extension) const {return requestedExtensions.find(extension) != requestedExtensions.end();}
+#endif
+
static const char* getResourceName(TResourceType);
const EShLanguage language; // stage, known at construction time
- EShSource source; // source language, known a bit later
std::string entryPointName;
std::string entryPointMangledName;
typedef std::list<TCall> TGraph;
@@ -818,6 +911,20 @@ protected:
int numErrors;
int numPushConstants;
bool recursive;
+ bool invertY;
+ bool useStorageBuffer;
+ bool nanMinMaxClamp; // true if desiring min/max/clamp to favor non-NaN over NaN
+ bool depthReplacing;
+ int localSize[3];
+ bool localSizeNotDefault[3];
+ int localSizeSpecId[3];
+#ifndef GLSLANG_WEB
+public:
+ const char* const implicitThisName;
+ const char* const implicitCounterName;
+protected:
+ EShSource source; // source language, known a bit later
+ bool useVulkanMemoryModel;
int invocations;
int vertices;
TLayoutGeometry inputPrimitive;
@@ -826,27 +933,22 @@ protected:
bool originUpperLeft;
TVertexSpacing vertexSpacing;
TVertexOrder vertexOrder;
+ TInterlockOrdering interlockOrdering;
bool pointMode;
- int localSize[3];
- int localSizeSpecId[3];
bool earlyFragmentTests;
bool postDepthCoverage;
TLayoutDepth depthLayout;
- bool depthReplacing;
bool hlslFunctionality1;
int blendEquations; // an 'or'ing of masks of shifts of TBlendEquationShift
bool xfbMode;
std::vector<TXfbBuffer> xfbBuffers; // all the data we need to track per xfb buffer
bool multiStream;
-
-#ifdef NV_EXTENSIONS
bool layoutOverrideCoverage;
bool geoPassthroughEXT;
int numShaderRecordNVBlocks;
ComputeDerivativeMode computeDerivativeMode;
int primitives;
int numTaskNVBlocks;
-#endif
// Base shift values
std::array<unsigned int, EResCount> shiftBinding;
@@ -857,23 +959,29 @@ protected:
std::vector<std::string> resourceSetBinding;
bool autoMapBindings;
bool autoMapLocations;
- bool invertY;
bool flattenUniformArrays;
bool useUnknownFormat;
bool hlslOffsets;
- bool useStorageBuffer;
- bool useVulkanMemoryModel;
bool hlslIoMapping;
bool useVariablePointers;
- std::set<TString> ioAccessed; // set of names of statically read/written I/O that might need extra checking
- std::vector<TIoRange> usedIo[4]; // sets of used locations, one for each of in, out, uniform, and buffers
- std::vector<TOffsetRange> usedAtomics; // sets of bindings used by atomic counters
- std::unordered_set<int> usedConstantId; // specialization constant ids used
std::set<TString> semanticNameSet;
EShTextureSamplerTransformMode textureSamplerTransformMode;
+ bool needToLegalize;
+ bool binaryDoubleOutput;
+ bool usePhysicalStorageBuffer;
+
+ std::unordered_map<std::string, int> uniformLocationOverrides;
+ int uniformLocationBase;
+#endif
+
+ std::unordered_set<int> usedConstantId; // specialization constant ids used
+ std::vector<TOffsetRange> usedAtomics; // sets of bindings used by atomic counters
+ std::vector<TIoRange> usedIo[4]; // sets of used locations, one for each of in, out, uniform, and buffers
+ // set of names of statically read/written I/O that might need extra checking
+ std::set<TString> ioAccessed;
// source code of shader, useful as part of debug information
std::string sourceFile;
std::string sourceText;
@@ -884,13 +992,6 @@ protected:
// for OpModuleProcessed, or equivalent
TProcesses processes;
- bool needToLegalize;
- bool binaryDoubleOutput;
- bool usePhysicalStorageBuffer;
-
- std::unordered_map<std::string, int> uniformLocationOverrides;
- int uniformLocationBase;
-
private:
void operator=(TIntermediate&); // prevent assignments
};
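The relocated setLocalSize above also gains a localSizeNotDefault flag. A standalone sketch (the struct is a stand-in, not glslang's class) of why the old `localSize[dim] > 1` test was insufficient:

    #include <cassert>

    // Sketch of the setLocalSize change: the old check (localSize[dim] > 1)
    // could not detect a conflict when a shader first declared
    // local_size_x = 1 explicitly and later redeclared it as another value.
    struct LocalSizeTracker {
        int  localSize[3]           = { 1, 1, 1 };
        bool localSizeNotDefault[3] = { false, false, false };

        bool setLocalSize(int dim, int size) {
            if (localSizeNotDefault[dim])
                return size == localSize[dim]; // already set: must match
            localSizeNotDefault[dim] = true;
            localSize[dim] = size;
            return true;
        }
    };

    int main() {
        LocalSizeTracker t;
        assert(t.setLocalSize(0, 1));   // explicit "= 1" is now recorded
        assert(!t.setLocalSize(0, 64)); // a later "= 64" is a conflict
    }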
diff --git a/thirdparty/glslang/glslang/MachineIndependent/parseVersions.h b/thirdparty/glslang/glslang/MachineIndependent/parseVersions.h
index 02af76a8a3..aa1964fc2e 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/parseVersions.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/parseVersions.h
@@ -57,26 +57,91 @@ public:
TParseVersions(TIntermediate& interm, int version, EProfile profile,
const SpvVersion& spvVersion, EShLanguage language, TInfoSink& infoSink,
bool forwardCompatible, EShMessages messages)
- : infoSink(infoSink), version(version), profile(profile), language(language),
- spvVersion(spvVersion), forwardCompatible(forwardCompatible),
- intermediate(interm), messages(messages), numErrors(0), currentScanner(0) { }
+ :
+#ifndef GLSLANG_WEB
+ forwardCompatible(forwardCompatible),
+ profile(profile),
+#endif
+ infoSink(infoSink), version(version),
+ language(language),
+ spvVersion(spvVersion),
+ intermediate(interm), messages(messages), numErrors(0), currentScanner(0) { }
virtual ~TParseVersions() { }
+ void requireStage(const TSourceLoc&, EShLanguageMask, const char* featureDesc);
+ void requireStage(const TSourceLoc&, EShLanguage, const char* featureDesc);
+#ifdef GLSLANG_WEB
+ const EProfile profile = EEsProfile;
+ bool isEsProfile() const { return true; }
+ void requireProfile(const TSourceLoc& loc, int profileMask, const char* featureDesc)
+ {
+ if (! (EEsProfile & profileMask))
+ error(loc, "not supported with this profile:", featureDesc, ProfileName(profile));
+ }
+ void profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, int numExtensions,
+ const char* const extensions[], const char* featureDesc)
+ {
+ if ((EEsProfile & profileMask) && (minVersion == 0 || version < minVersion))
+ error(loc, "not supported for this version or the enabled extensions", featureDesc, "");
+ }
+ void profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, const char* extension,
+ const char* featureDesc)
+ {
+ profileRequires(loc, profileMask, minVersion, extension ? 1 : 0, &extension, featureDesc);
+ }
+ void initializeExtensionBehavior() { }
+ void checkDeprecated(const TSourceLoc&, int queryProfiles, int depVersion, const char* featureDesc) { }
+ void requireNotRemoved(const TSourceLoc&, int queryProfiles, int removedVersion, const char* featureDesc) { }
+ void requireExtensions(const TSourceLoc&, int numExtensions, const char* const extensions[],
+ const char* featureDesc) { }
+ void ppRequireExtensions(const TSourceLoc&, int numExtensions, const char* const extensions[],
+ const char* featureDesc) { }
+ TExtensionBehavior getExtensionBehavior(const char*) { return EBhMissing; }
+ bool extensionTurnedOn(const char* const extension) { return false; }
+ bool extensionsTurnedOn(int numExtensions, const char* const extensions[]) { return false; }
+ void updateExtensionBehavior(int line, const char* const extension, const char* behavior) { }
+ void updateExtensionBehavior(const char* const extension, TExtensionBehavior) { }
+ void checkExtensionStage(const TSourceLoc&, const char* const extension) { }
+ void fullIntegerCheck(const TSourceLoc&, const char* op) { }
+ void doubleCheck(const TSourceLoc&, const char* op) { }
+ bool float16Arithmetic() { return false; }
+ void requireFloat16Arithmetic(const TSourceLoc& loc, const char* op, const char* featureDesc) { }
+ bool int16Arithmetic() { return false; }
+ void requireInt16Arithmetic(const TSourceLoc& loc, const char* op, const char* featureDesc) { }
+ bool int8Arithmetic() { return false; }
+ void requireInt8Arithmetic(const TSourceLoc& loc, const char* op, const char* featureDesc) { }
+ void int64Check(const TSourceLoc&, const char* op, bool builtIn = false) { }
+ void explicitFloat32Check(const TSourceLoc&, const char* op, bool builtIn = false) { }
+ void explicitFloat64Check(const TSourceLoc&, const char* op, bool builtIn = false) { }
+ bool relaxedErrors() const { return false; }
+ bool suppressWarnings() const { return true; }
+ bool isForwardCompatible() const { return false; }
+#else
+ bool forwardCompatible; // true if errors are to be given for use of deprecated features
+ EProfile profile; // the declared profile in the shader (core by default)
+ bool isEsProfile() const { return profile == EEsProfile; }
+ void requireProfile(const TSourceLoc& loc, int profileMask, const char* featureDesc);
+ void profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, int numExtensions,
+ const char* const extensions[], const char* featureDesc);
+ void profileRequires(const TSourceLoc& loc, int profileMask, int minVersion, const char* extension,
+ const char* featureDesc);
virtual void initializeExtensionBehavior();
- virtual void requireProfile(const TSourceLoc&, int queryProfiles, const char* featureDesc);
- virtual void profileRequires(const TSourceLoc&, int queryProfiles, int minVersion, int numExtensions, const char* const extensions[], const char* featureDesc);
- virtual void profileRequires(const TSourceLoc&, int queryProfiles, int minVersion, const char* const extension, const char* featureDesc);
- virtual void requireStage(const TSourceLoc&, EShLanguageMask, const char* featureDesc);
- virtual void requireStage(const TSourceLoc&, EShLanguage, const char* featureDesc);
virtual void checkDeprecated(const TSourceLoc&, int queryProfiles, int depVersion, const char* featureDesc);
virtual void requireNotRemoved(const TSourceLoc&, int queryProfiles, int removedVersion, const char* featureDesc);
- virtual void unimplemented(const TSourceLoc&, const char* featureDesc);
- virtual void requireExtensions(const TSourceLoc&, int numExtensions, const char* const extensions[], const char* featureDesc);
- virtual void ppRequireExtensions(const TSourceLoc&, int numExtensions, const char* const extensions[], const char* featureDesc);
+ virtual void requireExtensions(const TSourceLoc&, int numExtensions, const char* const extensions[],
+ const char* featureDesc);
+ virtual void ppRequireExtensions(const TSourceLoc&, int numExtensions, const char* const extensions[],
+ const char* featureDesc);
virtual TExtensionBehavior getExtensionBehavior(const char*);
virtual bool extensionTurnedOn(const char* const extension);
virtual bool extensionsTurnedOn(int numExtensions, const char* const extensions[]);
virtual void updateExtensionBehavior(int line, const char* const extension, const char* behavior);
+ virtual void updateExtensionBehavior(const char* const extension, TExtensionBehavior);
+ virtual bool checkExtensionsRequested(const TSourceLoc&, int numExtensions, const char* const extensions[],
+ const char* featureDesc);
+ virtual void checkExtensionStage(const TSourceLoc&, const char* const extension);
virtual void fullIntegerCheck(const TSourceLoc&, const char* op);
+
+ virtual void unimplemented(const TSourceLoc&, const char* featureDesc);
virtual void doubleCheck(const TSourceLoc&, const char* op);
virtual void float16Check(const TSourceLoc&, const char* op, bool builtIn = false);
virtual void float16ScalarVectorCheck(const TSourceLoc&, const char* op, bool builtIn = false);
@@ -88,24 +153,35 @@ public:
virtual void int8ScalarVectorCheck(const TSourceLoc&, const char* op, bool builtIn = false);
virtual bool int8Arithmetic();
virtual void requireInt8Arithmetic(const TSourceLoc& loc, const char* op, const char* featureDesc);
-#ifdef AMD_EXTENSIONS
virtual void float16OpaqueCheck(const TSourceLoc&, const char* op, bool builtIn = false);
-#endif
virtual void int64Check(const TSourceLoc&, const char* op, bool builtIn = false);
virtual void explicitInt8Check(const TSourceLoc&, const char* op, bool builtIn = false);
virtual void explicitInt16Check(const TSourceLoc&, const char* op, bool builtIn = false);
virtual void explicitInt32Check(const TSourceLoc&, const char* op, bool builtIn = false);
virtual void explicitFloat32Check(const TSourceLoc&, const char* op, bool builtIn = false);
virtual void explicitFloat64Check(const TSourceLoc&, const char* op, bool builtIn = false);
+ virtual void fcoopmatCheck(const TSourceLoc&, const char* op, bool builtIn = false);
+ virtual void intcoopmatCheck(const TSourceLoc&, const char *op, bool builtIn = false);
+ bool relaxedErrors() const { return (messages & EShMsgRelaxedErrors) != 0; }
+ bool suppressWarnings() const { return (messages & EShMsgSuppressWarnings) != 0; }
+ bool isForwardCompatible() const { return forwardCompatible; }
+#endif // GLSLANG_WEB
virtual void spvRemoved(const TSourceLoc&, const char* op);
virtual void vulkanRemoved(const TSourceLoc&, const char* op);
virtual void requireVulkan(const TSourceLoc&, const char* op);
virtual void requireSpv(const TSourceLoc&, const char* op);
- virtual bool checkExtensionsRequested(const TSourceLoc&, int numExtensions, const char* const extensions[], const char* featureDesc);
- virtual void updateExtensionBehavior(const char* const extension, TExtensionBehavior);
- virtual void checkExtensionStage(const TSourceLoc&, const char* const extension);
- virtual void fcoopmatCheck(const TSourceLoc&, const char* op, bool builtIn = false);
+
+#if defined(GLSLANG_WEB) && !defined(GLSLANG_WEB_DEVEL)
+ void C_DECL error(const TSourceLoc&, const char* szReason, const char* szToken,
+ const char* szExtraInfoFormat, ...) { addError(); }
+ void C_DECL warn(const TSourceLoc&, const char* szReason, const char* szToken,
+ const char* szExtraInfoFormat, ...) { }
+ void C_DECL ppError(const TSourceLoc&, const char* szReason, const char* szToken,
+ const char* szExtraInfoFormat, ...) { addError(); }
+ void C_DECL ppWarn(const TSourceLoc&, const char* szReason, const char* szToken,
+ const char* szExtraInfoFormat, ...) { }
+#else
virtual void C_DECL error(const TSourceLoc&, const char* szReason, const char* szToken,
const char* szExtraInfoFormat, ...) = 0;
virtual void C_DECL warn(const TSourceLoc&, const char* szReason, const char* szToken,
@@ -114,6 +190,7 @@ public:
const char* szExtraInfoFormat, ...) = 0;
virtual void C_DECL ppWarn(const TSourceLoc&, const char* szReason, const char* szToken,
const char* szExtraInfoFormat, ...) = 0;
+#endif
void addError() { ++numErrors; }
int getNumErrors() const { return numErrors; }
@@ -127,20 +204,20 @@ public:
void setCurrentString(int string) { currentScanner->setString(string); }
void getPreamble(std::string&);
- bool relaxedErrors() const { return (messages & EShMsgRelaxedErrors) != 0; }
- bool suppressWarnings() const { return (messages & EShMsgSuppressWarnings) != 0; }
+#ifdef ENABLE_HLSL
bool isReadingHLSL() const { return (messages & EShMsgReadHlsl) == EShMsgReadHlsl; }
bool hlslEnable16BitTypes() const { return (messages & EShMsgHlslEnable16BitTypes) != 0; }
bool hlslDX9Compatible() const { return (messages & EShMsgHlslDX9Compatible) != 0; }
+#else
+ bool isReadingHLSL() const { return false; }
+#endif
TInfoSink& infoSink;
// compilation mode
int version; // version, updated by #version in the shader
- EProfile profile; // the declared profile in the shader (core by default)
EShLanguage language; // really the stage
SpvVersion spvVersion;
- bool forwardCompatible; // true if errors are to be given for use of deprecated features
TIntermediate& intermediate; // helper for making and hooking up pieces of the parse tree
protected:
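Under GLSLANG_WEB (without GLSLANG_WEB_DEVEL), the error paths above become non-virtual no-ops that still call addError(). A minimal stand-in showing that compilation failure stays observable even though message formatting is stripped:

    #include <cassert>

    // Stand-in for the GLSLANG_WEB error path: messages are dropped to cut
    // binary size, but addError() still runs so compilation still fails.
    struct WebParseVersions {
        int numErrors = 0;
        void addError() { ++numErrors; }
        void error(const char* /*reason*/, ...) { addError(); } // no formatting
        void warn(const char* /*reason*/, ...)  { }             // fully elided
        int getNumErrors() const { return numErrors; }
    };

    int main() {
        WebParseVersions p;
        p.error("not supported");
        assert(p.getNumErrors() == 1); // failure is still observable
    }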
diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp
index c74e44f0fd..d7ff485c0a 100644..100755
--- a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/Pp.cpp
@@ -545,7 +545,7 @@ int TPpContext::evalToToken(int token, bool shortCircuit, int& res, bool& err, T
case MacroExpandStarted:
break;
case MacroExpandUndef:
- if (! shortCircuit && parseContext.profile == EEsProfile) {
+ if (! shortCircuit && parseContext.isEsProfile()) {
const char* message = "undefined macro in expression not allowed in es profile";
if (parseContext.relaxedErrors())
parseContext.ppWarn(ppToken->loc, message, "preprocessor evaluation", ppToken->name);
@@ -722,6 +722,7 @@ int TPpContext::CPPline(TPpToken* ppToken)
parseContext.setCurrentLine(lineRes);
if (token != '\n') {
+#ifndef GLSLANG_WEB
if (token == PpAtomConstString) {
parseContext.ppRequireExtensions(directiveLoc, 1, &E_GL_GOOGLE_cpp_style_line_directive, "filename-based #line");
// We need to save a copy of the string instead of pointing
@@ -731,7 +732,9 @@ int TPpContext::CPPline(TPpToken* ppToken)
parseContext.setCurrentSourceName(sourceName);
hasFile = true;
token = scanToken(ppToken);
- } else {
+ } else
+#endif
+ {
token = eval(token, MIN_PRECEDENCE, false, fileRes, fileErr, ppToken);
if (! fileErr) {
parseContext.setCurrentString(fileRes);
@@ -792,10 +795,8 @@ int TPpContext::CPPpragma(TPpToken* ppToken)
case PpAtomConstUint:
case PpAtomConstInt64:
case PpAtomConstUint64:
-#ifdef AMD_EXTENSIONS
case PpAtomConstInt16:
case PpAtomConstUint16:
-#endif
case PpAtomConstFloat:
case PpAtomConstDouble:
case PpAtomConstFloat16:
@@ -954,18 +955,20 @@ int TPpContext::readCPPline(TPpToken* ppToken)
case PpAtomIfndef:
token = CPPifdef(0, ppToken);
break;
+ case PpAtomLine:
+ token = CPPline(ppToken);
+ break;
+#ifndef GLSLANG_WEB
case PpAtomInclude:
if(!parseContext.isReadingHLSL()) {
parseContext.ppRequireExtensions(ppToken->loc, 1, &E_GL_GOOGLE_include_directive, "#include");
}
token = CPPinclude(ppToken);
break;
- case PpAtomLine:
- token = CPPline(ppToken);
- break;
case PpAtomPragma:
token = CPPpragma(ppToken);
break;
+#endif
case PpAtomUndef:
token = CPPundef(ppToken);
break;
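The filename-based #line gated above (GLSL requires GL_GOOGLE_cpp_style_line_directive for it) takes the same form as the C++ directive, so a plain C++ translation unit can illustrate the effect:

    // After this directive, __FILE__ reports "myfile.glsl" and the
    // following line is numbered 42.
    #line 42 "myfile.glsl"
    int main() { return 0; }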
diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp
index cc003a8d12..cc003a8d12 100644..100755
--- a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpContext.cpp
diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp
index f6f52d7d55..c293af3c1e 100644..100755
--- a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpScanner.cpp
@@ -142,6 +142,7 @@ int TPpContext::lFloatConst(int len, int ch, TPpToken* ppToken)
ch = getChar();
int firstDecimal = len;
+#ifdef ENABLE_HLSL
// 1.#INF or -1.#INF
if (ch == '#' && (ifdepth > 0 || parseContext.intermediate.getSource() == EShSourceHlsl)) {
if ((len < 2) ||
@@ -169,6 +170,7 @@ int TPpContext::lFloatConst(int len, int ch, TPpToken* ppToken)
}
}
}
+#endif
// Consume leading-zero digits after the decimal point
while (ch == '0') {
@@ -257,6 +259,7 @@ int TPpContext::lFloatConst(int len, int ch, TPpToken* ppToken)
// Suffix:
bool isDouble = false;
bool isFloat16 = false;
+#ifndef GLSLANG_WEB
if (ch == 'l' || ch == 'L') {
if (ifdepth == 0 && parseContext.intermediate.getSource() == EShSourceGlsl)
parseContext.doubleCheck(ppToken->loc, "double floating-point suffix");
@@ -295,11 +298,15 @@ int TPpContext::lFloatConst(int len, int ch, TPpToken* ppToken)
saveName(ch);
isFloat16 = true;
}
- } else if (ch == 'f' || ch == 'F') {
+ } else
+#endif
+ if (ch == 'f' || ch == 'F') {
+#ifndef GLSLANG_WEB
if (ifdepth == 0)
parseContext.profileRequires(ppToken->loc, EEsProfile, 300, nullptr, "floating-point suffix");
if (ifdepth == 0 && !parseContext.relaxedErrors())
parseContext.profileRequires(ppToken->loc, ~EEsProfile, 120, nullptr, "floating-point suffix");
+#endif
if (ifdepth == 0 && !hasDecimalOrExponent)
parseContext.ppError(ppToken->loc, "float literal needs a decimal point or exponent", "", "");
saveName(ch);
@@ -468,9 +475,7 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
static const int Num_Int64_Extensions = sizeof(Int64_Extensions) / sizeof(Int64_Extensions[0]);
static const char* const Int16_Extensions[] = {
-#ifdef AMD_EXTENSIONS
E_GL_AMD_gpu_shader_int16,
-#endif
E_GL_EXT_shader_explicit_arithmetic_types,
E_GL_EXT_shader_explicit_arithmetic_types_int16 };
static const int Num_Int16_Extensions = sizeof(Int16_Extensions) / sizeof(Int16_Extensions[0]);
@@ -579,6 +584,7 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
ppToken->name[len++] = (char)ch;
isUnsigned = true;
+#ifndef GLSLANG_WEB
int nextCh = getch();
if (nextCh == 'l' || nextCh == 'L') {
if (len < MaxTokenLength)
@@ -587,7 +593,6 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
} else
ungetch();
-#ifdef AMD_EXTENSIONS
nextCh = getch();
if ((nextCh == 's' || nextCh == 'S') &&
pp->parseContext.intermediate.getSource() == EShSourceGlsl) {
@@ -596,12 +601,10 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
isInt16 = true;
} else
ungetch();
-#endif
} else if (ch == 'l' || ch == 'L') {
if (len < MaxTokenLength)
ppToken->name[len++] = (char)ch;
isInt64 = true;
-#ifdef AMD_EXTENSIONS
} else if ((ch == 's' || ch == 'S') &&
pp->parseContext.intermediate.getSource() == EShSourceGlsl) {
if (len < MaxTokenLength)
@@ -689,6 +692,7 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
ppToken->name[len++] = (char)ch;
isUnsigned = true;
+#ifndef GLSLANG_WEB
int nextCh = getch();
if (nextCh == 'l' || nextCh == 'L') {
if (len < MaxTokenLength)
@@ -697,7 +701,6 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
} else
ungetch();
-#ifdef AMD_EXTENSIONS
nextCh = getch();
if ((nextCh == 's' || nextCh == 'S') &&
pp->parseContext.intermediate.getSource() == EShSourceGlsl) {
@@ -706,12 +709,10 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
isInt16 = true;
} else
ungetch();
-#endif
} else if (ch == 'l' || ch == 'L') {
if (len < MaxTokenLength)
ppToken->name[len++] = (char)ch;
isInt64 = true;
-#ifdef AMD_EXTENSIONS
} else if ((ch == 's' || ch == 'S') &&
pp->parseContext.intermediate.getSource() == EShSourceGlsl) {
if (len < MaxTokenLength)
@@ -780,6 +781,7 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
ppToken->name[len++] = (char)ch;
isUnsigned = true;
+#ifndef GLSLANG_WEB
int nextCh = getch();
if (nextCh == 'l' || nextCh == 'L') {
if (len < MaxTokenLength)
@@ -788,7 +790,6 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
} else
ungetch();
-#ifdef AMD_EXTENSIONS
nextCh = getch();
if ((nextCh == 's' || nextCh == 'S') &&
pp->parseContext.intermediate.getSource() == EShSourceGlsl) {
@@ -797,12 +798,10 @@ int TPpContext::tStringInput::scan(TPpToken* ppToken)
isInt16 = true;
} else
ungetch();
-#endif
} else if (ch == 'l' || ch == 'L') {
if (len < MaxTokenLength)
ppToken->name[len++] = (char)ch;
isInt64 = true;
-#ifdef AMD_EXTENSIONS
} else if ((ch == 's' || ch == 'S') &&
pp->parseContext.intermediate.getSource() == EShSourceGlsl) {
if (len < MaxTokenLength)
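The suffix scanning moved under #ifndef GLSLANG_WEB above accepts, after an optional 'u'/'U', either 'l'/'L' (64-bit) or 's'/'S' (16-bit, GLSL source only), so "7us" is an unsigned 16-bit literal. A self-contained sketch of that dispatch (the struct and function are stand-ins):

    #include <cassert>

    struct Suffix { bool isUnsigned, isInt64, isInt16; };

    // Stand-in for the suffix scan: consume an optional 'u'/'U', then
    // classify a trailing 'l'/'L' as 64-bit or 's'/'S' as 16-bit.
    static Suffix scanSuffix(const char* s) {
        Suffix r{false, false, false};
        if (*s == 'u' || *s == 'U') { r.isUnsigned = true; ++s; }
        if (*s == 'l' || *s == 'L') r.isInt64 = true;
        else if (*s == 's' || *s == 'S') r.isInt16 = true;
        return r;
    }

    int main() {
        assert(scanSuffix("us").isUnsigned && scanSuffix("us").isInt16);
        assert(scanSuffix("l").isInt64);
    }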
diff --git a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp
index ac9d8ac351..7ed58703f2 100644..100755
--- a/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/preprocessor/PpTokens.cpp
@@ -116,6 +116,7 @@ int TPpContext::TokenStream::getToken(TParseContextBase& parseContext, TPpToken
int atom = stream[currentPos++].get(*ppToken);
ppToken->loc = parseContext.getCurrentLoc();
+#ifndef GLSLANG_WEB
// Check for ##, unless the current # is the last character
if (atom == '#') {
if (peekToken('#')) {
@@ -125,6 +126,7 @@ int TPpContext::TokenStream::getToken(TParseContextBase& parseContext, TPpToken
atom = PpAtomPaste;
}
}
+#endif
return atom;
}
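The '##' check disabled for GLSLANG_WEB above implements standard token pasting (PpAtomPaste); its behavior matches the familiar C/C++ preprocessor operator:

    #include <cassert>

    #define PASTE(a, b) a##b  // '##' glues two tokens into one

    int main() {
        int PASTE(my, Var) = 7; // expands to: int myVar = 7;
        assert(myVar == 7);
        return 0;
    }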
diff --git a/thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp b/thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp
index ae95688ae8..83a3230f51 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/propagateNoContraction.cpp
@@ -37,6 +37,8 @@
// propagate the 'noContraction' qualifier.
//
+#ifndef GLSLANG_WEB
+
#include "propagateNoContraction.h"
#include <cstdlib>
@@ -79,7 +81,7 @@ typedef std::unordered_set<glslang::TIntermBranch*> ReturnBranchNodeSet;
// the node has 'noContraction' qualifier, otherwise false.
bool isPreciseObjectNode(glslang::TIntermTyped* node)
{
- return node->getType().getQualifier().noContraction;
+ return node->getType().getQualifier().isNoContraction();
}
// Returns true if the opcode is a dereferencing one.
@@ -864,3 +866,5 @@ void PropagateNoContraction(const glslang::TIntermediate& intermediate)
}
}
};
+
+#endif // GLSLANG_WEB
\ No newline at end of file
diff --git a/thirdparty/glslang/glslang/MachineIndependent/reflection.cpp b/thirdparty/glslang/glslang/MachineIndependent/reflection.cpp
index a09a04880e..b09367113c 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/reflection.cpp
+++ b/thirdparty/glslang/glslang/MachineIndependent/reflection.cpp
@@ -33,6 +33,8 @@
// POSSIBILITY OF SUCH DAMAGE.
//
+#ifndef GLSLANG_WEB
+
#include "../Include/Common.h"
#include "reflection.h"
#include "LiveTraverser.h"
@@ -110,6 +112,10 @@ public:
TReflection::TMapIndexToReflection &ioItems =
input ? reflection.indexToPipeInput : reflection.indexToPipeOutput;
+
+ TReflection::TNameToIndex &ioMapper =
+ input ? reflection.pipeInNameToIndex : reflection.pipeOutNameToIndex;
+
if (reflection.options & EShReflectionUnwrapIOBlocks) {
bool anonymous = IsAnonymous(name);
@@ -127,12 +133,13 @@ public:
blowUpIOAggregate(input, baseName, type);
}
} else {
- TReflection::TNameToIndex::const_iterator it = reflection.nameToIndex.find(name.c_str());
- if (it == reflection.nameToIndex.end()) {
- reflection.nameToIndex[name.c_str()] = (int)ioItems.size();
+ TReflection::TNameToIndex::const_iterator it = ioMapper.find(name.c_str());
+ if (it == ioMapper.end()) {
+ // separate pipe I/O params from uniforms and blocks
+ // 'in' is only for input in the first stage, just as 'out' is only for the last stage; see the traversal in the call stack.
+ ioMapper[name.c_str()] = static_cast<int>(ioItems.size());
ioItems.push_back(
TObjectReflection(name.c_str(), type, 0, mapToGlType(type), mapToGlArraySize(type), 0));
-
EShLanguageMask& stages = ioItems.back().stages;
stages = static_cast<EShLanguageMask>(stages | 1 << intermediate.getStage());
} else {
@@ -396,7 +403,7 @@ public:
topLevelArrayStride = variables.back().arrayStride;
}
- if ((reflection.options & EShReflectionSeparateBuffers) && terminalType->getBasicType() == EbtAtomicUint)
+ if ((reflection.options & EShReflectionSeparateBuffers) && terminalType->isAtomic())
reflection.atomicCounterUniformIndices.push_back(uniformIndex);
variables.back().topLevelArrayStride = topLevelArrayStride;
@@ -554,15 +561,18 @@ public:
bool blockParent = (base->getType().getBasicType() == EbtBlock && base->getQualifier().storage == EvqBuffer);
if (strictArraySuffix && blockParent) {
- const TTypeList& typeList = *base->getType().getStruct();
+ TType structDerefType(base->getType(), 0);
+
+ const TType &structType = base->getType().isArray() ? structDerefType : base->getType();
+ const TTypeList& typeList = *structType.getStruct();
TVector<int> memberOffsets;
memberOffsets.resize(typeList.size());
- getOffsets(base->getType(), memberOffsets);
+ getOffsets(structType, memberOffsets);
for (int i = 0; i < (int)typeList.size(); ++i) {
- TType derefType(base->getType(), i);
+ TType derefType(structType, i);
TString name = baseName;
if (name.size() > 0)
name.append(".");
@@ -573,7 +583,7 @@ public:
if (derefType.isArray() && derefType.isStruct()) {
name.append("[0]");
blowUpActiveAggregate(TType(derefType, 0), name, derefs, derefs.end(), memberOffsets[i],
- blockIndex, 0, getArrayStride(base->getType(), derefType),
+ blockIndex, 0, getArrayStride(structType, derefType),
base->getQualifier().storage, false);
} else {
blowUpActiveAggregate(derefType, name, derefs, derefs.end(), memberOffsets[i], blockIndex,
@@ -701,7 +711,6 @@ public:
case EsdBuffer:
return GL_SAMPLER_BUFFER;
}
-#ifdef AMD_EXTENSIONS
case EbtFloat16:
switch ((int)sampler.dim) {
case Esd1D:
@@ -730,7 +739,6 @@ public:
case EsdBuffer:
return GL_FLOAT16_SAMPLER_BUFFER_AMD;
}
-#endif
case EbtInt:
switch ((int)sampler.dim) {
case Esd1D:
@@ -793,7 +801,6 @@ public:
case EsdBuffer:
return GL_IMAGE_BUFFER;
}
-#ifdef AMD_EXTENSIONS
case EbtFloat16:
switch ((int)sampler.dim) {
case Esd1D:
@@ -812,7 +819,6 @@ public:
case EsdBuffer:
return GL_FLOAT16_IMAGE_BUFFER_AMD;
}
-#endif
case EbtInt:
switch ((int)sampler.dim) {
case Esd1D:
@@ -878,9 +884,7 @@ public:
switch (type.getBasicType()) {
case EbtFloat: return GL_FLOAT_VEC2 + offset;
case EbtDouble: return GL_DOUBLE_VEC2 + offset;
-#ifdef AMD_EXTENSIONS
case EbtFloat16: return GL_FLOAT16_VEC2_NV + offset;
-#endif
case EbtInt: return GL_INT_VEC2 + offset;
case EbtUint: return GL_UNSIGNED_INT_VEC2 + offset;
case EbtInt64: return GL_INT64_ARB + offset;
@@ -940,7 +944,6 @@ public:
default: return 0;
}
}
-#ifdef AMD_EXTENSIONS
case EbtFloat16:
switch (type.getMatrixCols()) {
case 2:
@@ -965,7 +968,6 @@ public:
default: return 0;
}
}
-#endif
default:
return 0;
}
@@ -974,9 +976,7 @@ public:
switch (type.getBasicType()) {
case EbtFloat: return GL_FLOAT;
case EbtDouble: return GL_DOUBLE;
-#ifdef AMD_EXTENSIONS
case EbtFloat16: return GL_FLOAT16_NV;
-#endif
case EbtInt: return GL_INT;
case EbtUint: return GL_UNSIGNED_INT;
case EbtInt64: return GL_INT64_ARB;
@@ -1093,6 +1093,7 @@ void TReflection::buildAttributeReflection(EShLanguage stage, const TIntermediat
// build counter block index associations for buffers
void TReflection::buildCounterIndices(const TIntermediate& intermediate)
{
+#ifdef ENABLE_HLSL
// search for ones that have counters
for (int i = 0; i < int(indexToUniformBlock.size()); ++i) {
const TString counterName(intermediate.addCounterBufferName(indexToUniformBlock[i].name).c_str());
@@ -1101,6 +1102,7 @@ void TReflection::buildCounterIndices(const TIntermediate& intermediate)
if (index >= 0)
indexToUniformBlock[i].counterIndex = index;
}
+#endif
}
// build Shader Stages mask for all uniforms
@@ -1198,3 +1200,5 @@ void TReflection::dump()
}
} // end namespace glslang
+
+#endif // GLSLANG_WEB
diff --git a/thirdparty/glslang/glslang/MachineIndependent/reflection.h b/thirdparty/glslang/glslang/MachineIndependent/reflection.h
index 44b17a05ad..efdc8934fb 100644
--- a/thirdparty/glslang/glslang/MachineIndependent/reflection.h
+++ b/thirdparty/glslang/glslang/MachineIndependent/reflection.h
@@ -33,6 +33,8 @@
// POSSIBILITY OF SUCH DAMAGE.
//
+#ifndef GLSLANG_WEB
+
#ifndef _REFLECTION_INCLUDED
#define _REFLECTION_INCLUDED
@@ -150,6 +152,20 @@ public:
// see getIndex(const char*)
int getIndex(const TString& name) const { return getIndex(name.c_str()); }
+
+ // for mapping any name to its index (only pipe input/output names)
+ int getPipeIOIndex(const char* name, const bool inOrOut) const
+ {
+ TNameToIndex::const_iterator it = inOrOut ? pipeInNameToIndex.find(name) : pipeOutNameToIndex.find(name);
+ if (it == (inOrOut ? pipeInNameToIndex.end() : pipeOutNameToIndex.end()))
+ return -1;
+ else
+ return it->second;
+ }
+
+ // see getPipeIOIndex(const char*, const bool)
+ int getPipeIOIndex(const TString& name, const bool inOrOut) const { return getPipeIOIndex(name.c_str(), inOrOut); }
+
// Thread local size
unsigned getLocalSize(int dim) const { return dim <= 2 ? localSize[dim] : 0; }
@@ -187,6 +203,8 @@ protected:
TObjectReflection badReflection; // return for queries of -1 or generally out of range; has expected descriptions within it for this
TNameToIndex nameToIndex; // maps names to indexes; can hold all types of data: uniform/buffer and which function names have been processed
+ TNameToIndex pipeInNameToIndex; // maps pipe in names to indexes; this is a fix to separate pipe I/O from uniforms and buffers.
+ TNameToIndex pipeOutNameToIndex; // maps pipe out names to indexes; this is a fix to separate pipe I/O from uniforms and buffers.
TMapIndexToReflection indexToUniform;
TMapIndexToReflection indexToUniformBlock;
TMapIndexToReflection indexToBufferVariable;
@@ -201,3 +219,5 @@ protected:
} // end namespace glslang
#endif // _REFLECTION_INCLUDED
+
+#endif // GLSLANG_WEB
\ No newline at end of file
diff --git a/thirdparty/glslang/glslang/OSDependent/Web/glslang.after.js b/thirdparty/glslang/glslang/OSDependent/Web/glslang.after.js
new file mode 100644
index 0000000000..c2cfc35a48
--- /dev/null
+++ b/thirdparty/glslang/glslang/OSDependent/Web/glslang.after.js
@@ -0,0 +1,26 @@
+export default (() => {
+ const initialize = () => {
+ return new Promise(resolve => {
+ Module({
+ locateFile() {
+ const i = import.meta.url.lastIndexOf('/')
+ return import.meta.url.substring(0, i) + '/glslang.wasm';
+ },
+ onRuntimeInitialized() {
+ resolve({
+ compileGLSLZeroCopy: this.compileGLSLZeroCopy,
+ compileGLSL: this.compileGLSL,
+ });
+ },
+ });
+ });
+ };
+
+ let instance;
+ return () => {
+ if (!instance) {
+ instance = initialize();
+ }
+ return instance;
+ };
+})();
diff --git a/thirdparty/glslang/glslang/OSDependent/Web/glslang.js.cpp b/thirdparty/glslang/glslang/OSDependent/Web/glslang.js.cpp
new file mode 100644
index 0000000000..6cb93fe27e
--- /dev/null
+++ b/thirdparty/glslang/glslang/OSDependent/Web/glslang.js.cpp
@@ -0,0 +1,269 @@
+//
+// Copyright (C) 2019 Google, Inc.
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+//
+// Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//
+// Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+//
+// Neither the name of 3Dlabs Inc. Ltd. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+//
+
+#include <cstdio>
+#include <cstdint>
+#include <memory>
+#include <cassert> // assert() is used by the non-Emscripten test main below
+
+#ifdef __EMSCRIPTEN__
+#include <emscripten.h>
+#endif
+
+#include "../../../SPIRV/GlslangToSpv.h"
+#include "../../../glslang/Public/ShaderLang.h"
+
+#ifndef __EMSCRIPTEN__
+#define EMSCRIPTEN_KEEPALIVE
+#endif
+
+const TBuiltInResource DefaultTBuiltInResource = {
+ /* .MaxLights = */ 32,
+ /* .MaxClipPlanes = */ 6,
+ /* .MaxTextureUnits = */ 32,
+ /* .MaxTextureCoords = */ 32,
+ /* .MaxVertexAttribs = */ 64,
+ /* .MaxVertexUniformComponents = */ 4096,
+ /* .MaxVaryingFloats = */ 64,
+ /* .MaxVertexTextureImageUnits = */ 32,
+ /* .MaxCombinedTextureImageUnits = */ 80,
+ /* .MaxTextureImageUnits = */ 32,
+ /* .MaxFragmentUniformComponents = */ 4096,
+ /* .MaxDrawBuffers = */ 32,
+ /* .MaxVertexUniformVectors = */ 128,
+ /* .MaxVaryingVectors = */ 8,
+ /* .MaxFragmentUniformVectors = */ 16,
+ /* .MaxVertexOutputVectors = */ 16,
+ /* .MaxFragmentInputVectors = */ 15,
+ /* .MinProgramTexelOffset = */ -8,
+ /* .MaxProgramTexelOffset = */ 7,
+ /* .MaxClipDistances = */ 8,
+ /* .MaxComputeWorkGroupCountX = */ 65535,
+ /* .MaxComputeWorkGroupCountY = */ 65535,
+ /* .MaxComputeWorkGroupCountZ = */ 65535,
+ /* .MaxComputeWorkGroupSizeX = */ 1024,
+ /* .MaxComputeWorkGroupSizeY = */ 1024,
+ /* .MaxComputeWorkGroupSizeZ = */ 64,
+ /* .MaxComputeUniformComponents = */ 1024,
+ /* .MaxComputeTextureImageUnits = */ 16,
+ /* .MaxComputeImageUniforms = */ 8,
+ /* .MaxComputeAtomicCounters = */ 8,
+ /* .MaxComputeAtomicCounterBuffers = */ 1,
+ /* .MaxVaryingComponents = */ 60,
+ /* .MaxVertexOutputComponents = */ 64,
+ /* .MaxGeometryInputComponents = */ 64,
+ /* .MaxGeometryOutputComponents = */ 128,
+ /* .MaxFragmentInputComponents = */ 128,
+ /* .MaxImageUnits = */ 8,
+ /* .MaxCombinedImageUnitsAndFragmentOutputs = */ 8,
+ /* .MaxCombinedShaderOutputResources = */ 8,
+ /* .MaxImageSamples = */ 0,
+ /* .MaxVertexImageUniforms = */ 0,
+ /* .MaxTessControlImageUniforms = */ 0,
+ /* .MaxTessEvaluationImageUniforms = */ 0,
+ /* .MaxGeometryImageUniforms = */ 0,
+ /* .MaxFragmentImageUniforms = */ 8,
+ /* .MaxCombinedImageUniforms = */ 8,
+ /* .MaxGeometryTextureImageUnits = */ 16,
+ /* .MaxGeometryOutputVertices = */ 256,
+ /* .MaxGeometryTotalOutputComponents = */ 1024,
+ /* .MaxGeometryUniformComponents = */ 1024,
+ /* .MaxGeometryVaryingComponents = */ 64,
+ /* .MaxTessControlInputComponents = */ 128,
+ /* .MaxTessControlOutputComponents = */ 128,
+ /* .MaxTessControlTextureImageUnits = */ 16,
+ /* .MaxTessControlUniformComponents = */ 1024,
+ /* .MaxTessControlTotalOutputComponents = */ 4096,
+ /* .MaxTessEvaluationInputComponents = */ 128,
+ /* .MaxTessEvaluationOutputComponents = */ 128,
+ /* .MaxTessEvaluationTextureImageUnits = */ 16,
+ /* .MaxTessEvaluationUniformComponents = */ 1024,
+ /* .MaxTessPatchComponents = */ 120,
+ /* .MaxPatchVertices = */ 32,
+ /* .MaxTessGenLevel = */ 64,
+ /* .MaxViewports = */ 16,
+ /* .MaxVertexAtomicCounters = */ 0,
+ /* .MaxTessControlAtomicCounters = */ 0,
+ /* .MaxTessEvaluationAtomicCounters = */ 0,
+ /* .MaxGeometryAtomicCounters = */ 0,
+ /* .MaxFragmentAtomicCounters = */ 8,
+ /* .MaxCombinedAtomicCounters = */ 8,
+ /* .MaxAtomicCounterBindings = */ 1,
+ /* .MaxVertexAtomicCounterBuffers = */ 0,
+ /* .MaxTessControlAtomicCounterBuffers = */ 0,
+ /* .MaxTessEvaluationAtomicCounterBuffers = */ 0,
+ /* .MaxGeometryAtomicCounterBuffers = */ 0,
+ /* .MaxFragmentAtomicCounterBuffers = */ 1,
+ /* .MaxCombinedAtomicCounterBuffers = */ 1,
+ /* .MaxAtomicCounterBufferSize = */ 16384,
+ /* .MaxTransformFeedbackBuffers = */ 4,
+ /* .MaxTransformFeedbackInterleavedComponents = */ 64,
+ /* .MaxCullDistances = */ 8,
+ /* .MaxCombinedClipAndCullDistances = */ 8,
+ /* .MaxSamples = */ 4,
+ /* .maxMeshOutputVerticesNV = */ 256,
+ /* .maxMeshOutputPrimitivesNV = */ 512,
+ /* .maxMeshWorkGroupSizeX_NV = */ 32,
+ /* .maxMeshWorkGroupSizeY_NV = */ 1,
+ /* .maxMeshWorkGroupSizeZ_NV = */ 1,
+ /* .maxTaskWorkGroupSizeX_NV = */ 32,
+ /* .maxTaskWorkGroupSizeY_NV = */ 1,
+ /* .maxTaskWorkGroupSizeZ_NV = */ 1,
+ /* .maxMeshViewCountNV = */ 4,
+
+ /* .limits = */ {
+ /* .nonInductiveForLoops = */ 1,
+ /* .whileLoops = */ 1,
+ /* .doWhileLoops = */ 1,
+ /* .generalUniformIndexing = */ 1,
+ /* .generalAttributeMatrixVectorIndexing = */ 1,
+ /* .generalVaryingIndexing = */ 1,
+ /* .generalSamplerIndexing = */ 1,
+ /* .generalVariableIndexing = */ 1,
+ /* .generalConstantMatrixVectorIndexing = */ 1,
+ }};
+
+static bool initialized = false;
+
+extern "C" {
+
+/*
+ * Takes in a GLSL shader as a string and converts it to SPIR-V in binary form.
+ *
+ * |glsl| Null-terminated string containing the shader to be converted.
+ * |stage_int| Magic number indicating the type of shader being processed.
+ * Legal values are as follows:
+ * Vertex = 0
+ * Fragment = 4
+ * Compute = 5
+ * |gen_debug| Flag to indicate if debug information should be generated.
+ * |spirv| Output parameter for a pointer to the resulting SPIR-V data.
+ * |spirv_len| Output parameter for the length of the output binary buffer.
+ *
+ * Returns a void* pointer which, if not null, must be destroyed by
+ * destroy_output_buffer(). (This is not the same pointer returned in |spirv|.)
+ * If null, the compilation failed.
+ */
+EMSCRIPTEN_KEEPALIVE
+void* convert_glsl_to_spirv(const char* glsl, int stage_int, bool gen_debug, uint32_t** spirv, size_t* spirv_len)
+{
+ if (glsl == nullptr) {
+ fprintf(stderr, "Input pointer null\n");
+ return nullptr;
+ }
+ if (spirv == nullptr || spirv_len == nullptr) {
+ fprintf(stderr, "Output pointer null\n");
+ return nullptr;
+ }
+ *spirv = nullptr;
+ *spirv_len = 0;
+
+ if (stage_int != 0 && stage_int != 4 && stage_int != 5) {
+ fprintf(stderr, "Invalid shader stage\n");
+ return nullptr;
+ }
+ EShLanguage stage = static_cast<EShLanguage>(stage_int);
+
+ if (!initialized) {
+ glslang::InitializeProcess();
+ initialized = true;
+ }
+
+ glslang::TShader shader(stage);
+ shader.setStrings(&glsl, 1);
+ shader.setEnvInput(glslang::EShSourceGlsl, stage, glslang::EShClientVulkan, 100);
+ shader.setEnvClient(glslang::EShClientVulkan, glslang::EShTargetVulkan_1_1);
+ shader.setEnvTarget(glslang::EShTargetSpv, glslang::EShTargetSpv_1_3);
+ if (!shader.parse(&DefaultTBuiltInResource, 100, true, EShMsgDefault)) {
+ fprintf(stderr, "Parse failed\n");
+ fprintf(stderr, "%s\n", shader.getInfoLog());
+ return nullptr;
+ }
+
+ glslang::TProgram program;
+ program.addShader(&shader);
+ if (!program.link(EShMsgDefault)) {
+ fprintf(stderr, "Link failed\n");
+ fprintf(stderr, "%s\n", program.getInfoLog());
+ return nullptr;
+ }
+
+ glslang::SpvOptions spvOptions;
+ spvOptions.generateDebugInfo = gen_debug;
+ spvOptions.optimizeSize = false;
+ spvOptions.disassemble = false;
+ spvOptions.validate = false;
+
+ std::vector<uint32_t>* output = new std::vector<uint32_t>;
+ glslang::GlslangToSpv(*program.getIntermediate(stage), *output, nullptr, &spvOptions);
+
+ *spirv_len = output->size();
+ *spirv = output->data();
+ return output;
+}
+
+/*
+ * Destroys a buffer created by convert_glsl_to_spirv
+ */
+EMSCRIPTEN_KEEPALIVE
+void destroy_output_buffer(void* p)
+{
+ delete static_cast<std::vector<uint32_t>*>(p);
+}
+
+} // extern "C"
+
+/*
+ * For non-Emscripten builds we supply a generic main, so that the glslang.js
+ * build target can generate an executable with a trivial use case instead of
+ * generating a WASM binary. This is done so that there is a target that can be
+ * built and its output analyzed using desktop tools, since WASM binaries are
+ * specific to the Emscripten toolchain.
+ */
+#ifndef __EMSCRIPTEN__
+int main() {
+ const char* input = R"(#version 310 es
+
+void main() { })";
+
+ uint32_t* output;
+ size_t output_len;
+
+ void* id = convert_glsl_to_spirv(input, 4, false, &output, &output_len);
+ assert(output != nullptr);
+ assert(output_len != 0);
+ destroy_output_buffer(id);
+ return 0;
+}
+#endif // ifndef __EMSCRIPTEN__
diff --git a/thirdparty/glslang/glslang/OSDependent/Web/glslang.pre.js b/thirdparty/glslang/glslang/OSDependent/Web/glslang.pre.js
new file mode 100644
index 0000000000..7d3fd0234c
--- /dev/null
+++ b/thirdparty/glslang/glslang/OSDependent/Web/glslang.pre.js
@@ -0,0 +1,45 @@
+Module['compileGLSLZeroCopy'] = function(glsl, shader_stage, gen_debug) {
+ gen_debug = !!gen_debug;
+
+ var shader_stage_int;
+ if (shader_stage === 'vertex') {
+ shader_stage_int = 0;
+ } else if (shader_stage === 'fragment') {
+ shader_stage_int = 4;
+ } else if (shader_stage === 'compute') {
+ shader_stage_int = 5;
+ } else {
+ throw new Error("shader_stage must be 'vertex', 'fragment', or 'compute'");
+ }
+
+ var p_output = Module['_malloc'](4);
+ var p_output_len = Module['_malloc'](4);
+ var id = ccall('convert_glsl_to_spirv',
+ 'number',
+ ['string', 'number', 'boolean', 'number', 'number'],
+ [glsl, shader_stage_int, gen_debug, p_output, p_output_len]);
+ var output = getValue(p_output, 'i32');
+ var output_len = getValue(p_output_len, 'i32');
+ Module['_free'](p_output);
+ Module['_free'](p_output_len);
+
+ if (id === 0) {
+ throw new Error('GLSL compilation failed');
+ }
+
+ var ret = {};
+ var outputIndexU32 = output / 4;
+ ret['data'] = Module['HEAPU32'].subarray(outputIndexU32, outputIndexU32 + output_len);
+ ret['free'] = function() {
+ Module['_destroy_output_buffer'](id);
+ };
+
+ return ret;
+};
+
+Module['compileGLSL'] = function(glsl, shader_stage, gen_debug) {
+ var compiled = Module['compileGLSLZeroCopy'](glsl, shader_stage, gen_debug);
+ var ret = compiled['data'].slice();
+ compiled['free']();
+ return ret;
+};
diff --git a/thirdparty/glslang/glslang/Public/ShaderLang.h b/thirdparty/glslang/glslang/Public/ShaderLang.h
index 33f05e2cdf..4fe5c7df19 100644..100755
--- a/thirdparty/glslang/glslang/Public/ShaderLang.h
+++ b/thirdparty/glslang/glslang/Public/ShaderLang.h
@@ -68,7 +68,7 @@
// This should always increase, as some paths do not consume
// a more major number.
// It should increment by one when new functionality is added.
-#define GLSLANG_MINOR_VERSION 12
+#define GLSLANG_MINOR_VERSION 13
//
// Call before doing any other compiler/linker operations.
@@ -126,36 +126,38 @@ class TType;
typedef enum {
EShSourceNone,
- EShSourceGlsl,
- EShSourceHlsl,
-} EShSource; // if EShLanguage were EShStage, this could be EShLanguage instead
+ EShSourceGlsl, // GLSL, includes ESSL (OpenGL ES GLSL)
+ EShSourceHlsl, // HLSL
+} EShSource; // if EShLanguage were EShStage, this could be EShLanguage instead
typedef enum {
- EShClientNone,
+ EShClientNone, // use when there is no client, e.g. for validation
EShClientVulkan,
EShClientOpenGL,
} EShClient;
typedef enum {
EShTargetNone,
- EShTargetSpv, // preferred spelling
+ EShTargetSpv, // SPIR-V (preferred spelling)
EshTargetSpv = EShTargetSpv, // legacy spelling
} EShTargetLanguage;
typedef enum {
- EShTargetVulkan_1_0 = (1 << 22),
- EShTargetVulkan_1_1 = (1 << 22) | (1 << 12),
- EShTargetOpenGL_450 = 450,
+ EShTargetVulkan_1_0 = (1 << 22), // Vulkan 1.0
+ EShTargetVulkan_1_1 = (1 << 22) | (1 << 12), // Vulkan 1.1
+ EShTargetVulkan_1_2 = (1 << 22) | (2 << 12), // Vulkan 1.2
+ EShTargetOpenGL_450 = 450, // OpenGL
} EShTargetClientVersion;
typedef EShTargetClientVersion EshTargetClientVersion;
typedef enum {
- EShTargetSpv_1_0 = (1 << 16),
- EShTargetSpv_1_1 = (1 << 16) | (1 << 8),
- EShTargetSpv_1_2 = (1 << 16) | (2 << 8),
- EShTargetSpv_1_3 = (1 << 16) | (3 << 8),
- EShTargetSpv_1_4 = (1 << 16) | (4 << 8),
+ EShTargetSpv_1_0 = (1 << 16), // SPIR-V 1.0
+ EShTargetSpv_1_1 = (1 << 16) | (1 << 8), // SPIR-V 1.1
+ EShTargetSpv_1_2 = (1 << 16) | (2 << 8), // SPIR-V 1.2
+ EShTargetSpv_1_3 = (1 << 16) | (3 << 8), // SPIR-V 1.3
+ EShTargetSpv_1_4 = (1 << 16) | (4 << 8), // SPIR-V 1.4
+ EShTargetSpv_1_5 = (1 << 16) | (5 << 8), // SPIR-V 1.5
} EShTargetLanguageVersion;
struct TInputLanguage {
@@ -432,15 +434,42 @@ public:
void addUniformLocationOverride(const char* name, int loc);
void setUniformLocationBase(int base);
void setInvertY(bool invert);
+#ifdef ENABLE_HLSL
void setHlslIoMapping(bool hlslIoMap);
void setFlattenUniformArrays(bool flatten);
+#endif
void setNoStorageFormat(bool useUnknownFormat);
+ void setNanMinMaxClamp(bool nanMinMaxClamp);
void setTextureSamplerTransformMode(EShTextureSamplerTransformMode mode);
// For setting up the environment (cleared to nothingness in the constructor).
// These must be called so that parsing is done for the right source language and
// target environment, either indirectly through TranslateEnvironment() based on
// EShMessages et. al., or directly by the user.
+ //
+ // setEnvInput: The input source language and stage. If generating code for a
+ // specific client, the input client semantics to use and the
+ // version of the that client's input semantics to use, otherwise
+ // use EShClientNone and version of 0, e.g. for validation mode.
+ // Note 'version' does not describe the target environment,
+ // just the version of the source dialect to compile under.
+ //
+ // See the definitions of TEnvironment, EShSource, EShLanguage,
+ // and EShClient for choices and more detail.
+ //
+ // setEnvClient: The client that will be hosting the execution, and its version.
+ // Note 'version' is not the version of the languages involved, but
+ // the version of the client environment.
+ // Use EShClientNone and version of 0 if there is no client, e.g.
+ // for validation mode.
+ //
+ // See EShTargetClientVersion for choices.
+ //
+ // setEnvTarget: The language to translate to when generating code, and that
+ // language's version.
+ // Use EShTargetNone and version of 0 if there is no client, e.g.
+ // for validation mode.
+ //
void setEnvInput(EShSource lang, EShLanguage envStage, EShClient client, int version)
{
environment.input.languageFamily = lang;
@@ -458,8 +487,15 @@ public:
environment.target.language = lang;
environment.target.version = version;
}
+
+ void getStrings(const char* const* &s, int& n) { s = strings; n = numStrings; }
+
+#ifdef ENABLE_HLSL
void setEnvTargetHlslFunctionality1() { environment.target.hlslFunctionality1 = true; }
bool getEnvTargetHlslFunctionality1() const { return environment.target.hlslFunctionality1; }
+#else
+ bool getEnvTargetHlslFunctionality1() const { return false; }
+#endif
// Interface to #include handlers.
//
@@ -610,6 +646,8 @@ private:
TShader& operator=(TShader&);
};
+#ifndef GLSLANG_WEB
+
//
// A reflection database and its interface, consistent with the OpenGL API reflection queries.
//
@@ -645,8 +683,9 @@ protected:
const TType* type;
};
-class TReflection;
-class TIoMapper;
+class TReflection;
+class TIoMapper;
+struct TVarEntryInfo;
// Allows customizing the binding layout after linking.
// All used uniform variables will invoke at least validateBinding.
@@ -667,53 +706,65 @@ class TIoMapper;
// notify callbacks; this phase ends with a call to endNotifications.
// Phase two starts directly after the call to endNotifications
// and calls all other callbacks to validate and to get the
-// bindings, sets, locations, component and color indices.
+// bindings, sets, locations, component and color indices.
//
// NOTE: limit checks are still applied to bindings and sets
// and may result in an error.
class TIoMapResolver
{
public:
- virtual ~TIoMapResolver() {}
-
- // Should return true if the resulting/current binding would be okay.
- // Basic idea is to do aliasing binding checks with this.
- virtual bool validateBinding(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Should return a value >= 0 if the current binding should be overridden.
- // Return -1 if the current binding (including no binding) should be kept.
- virtual int resolveBinding(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Should return a value >= 0 if the current set should be overridden.
- // Return -1 if the current set (including no set) should be kept.
- virtual int resolveSet(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Should return a value >= 0 if the current location should be overridden.
- // Return -1 if the current location (including no location) should be kept.
- virtual int resolveUniformLocation(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Should return true if the resulting/current setup would be okay.
- // Basic idea is to do aliasing checks and reject invalid semantic names.
- virtual bool validateInOut(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Should return a value >= 0 if the current location should be overridden.
- // Return -1 if the current location (including no location) should be kept.
- virtual int resolveInOutLocation(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Should return a value >= 0 if the current component index should be overridden.
- // Return -1 if the current component index (including no index) should be kept.
- virtual int resolveInOutComponent(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Should return a value >= 0 if the current color index should be overridden.
- // Return -1 if the current color index (including no index) should be kept.
- virtual int resolveInOutIndex(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Notification of a uniform variable
- virtual void notifyBinding(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Notification of a in or out variable
- virtual void notifyInOut(EShLanguage stage, const char* name, const TType& type, bool is_live) = 0;
- // Called by mapIO when it has finished the notify pass
- virtual void endNotifications(EShLanguage stage) = 0;
- // Called by mapIO when it starts its notify pass for the given stage
- virtual void beginNotifications(EShLanguage stage) = 0;
- // Called by mipIO when it starts its resolve pass for the given stage
- virtual void beginResolve(EShLanguage stage) = 0;
- // Called by mapIO when it has finished the resolve pass
- virtual void endResolve(EShLanguage stage) = 0;
+ virtual ~TIoMapResolver() {}
+
+ // Should return true if the resulting/current binding would be okay.
+ // Basic idea is to do aliasing binding checks with this.
+ virtual bool validateBinding(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Should return a value >= 0 if the current binding should be overridden.
+ // Return -1 if the current binding (including no binding) should be kept.
+ virtual int resolveBinding(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Should return a value >= 0 if the current set should be overridden.
+ // Return -1 if the current set (including no set) should be kept.
+ virtual int resolveSet(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Should return a value >= 0 if the current location should be overridden.
+ // Return -1 if the current location (including no location) should be kept.
+ virtual int resolveUniformLocation(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Should return true if the resulting/current setup would be okay.
+ // Basic idea is to do aliasing checks and reject invalid semantic names.
+ virtual bool validateInOut(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Should return a value >= 0 if the current location should be overridden.
+ // Return -1 if the current location (including no location) should be kept.
+ virtual int resolveInOutLocation(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Should return a value >= 0 if the current component index should be overridden.
+ // Return -1 if the current component index (including no index) should be kept.
+ virtual int resolveInOutComponent(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Should return a value >= 0 if the current color index should be overridden.
+ // Return -1 if the current color index (including no index) should be kept.
+ virtual int resolveInOutIndex(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Notification of a uniform variable
+ virtual void notifyBinding(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Notification of an in or out variable
+ virtual void notifyInOut(EShLanguage stage, TVarEntryInfo& ent) = 0;
+ // Called by mapIO when it starts its notify pass for the given stage
+ virtual void beginNotifications(EShLanguage stage) = 0;
+ // Called by mapIO when it has finished the notify pass
+ virtual void endNotifications(EShLanguage stage) = 0;
+ // Called by mapIO when it starts its resolve pass for the given stage
+ virtual void beginResolve(EShLanguage stage) = 0;
+ // Called by mapIO when it has finished the resolve pass
+ virtual void endResolve(EShLanguage stage) = 0;
+ // Called by mapIO when it starts its symbol collect for the given stage
+ virtual void beginCollect(EShLanguage stage) = 0;
+ // Called by mapIO when it has finished the symbol collect
+ virtual void endCollect(EShLanguage stage) = 0;
+ // Called by TSlotCollector to resolve storage locations or bindings
+ virtual void reserverStorageSlot(TVarEntryInfo& ent, TInfoSink& infoSink) = 0;
+ // Called by TSlotCollector to resolve resource locations or bindings
+ virtual void reserverResourceSlot(TVarEntryInfo& ent, TInfoSink& infoSink) = 0;
+ // Called by mapIO.addStage to set the shader stage mask, marking a stage as added to this pipeline
+ virtual void addStage(EShLanguage stage) = 0;
};
+#endif // GLSLANG_WEB
+
// Make one TProgram per set of shaders that will get linked together. Add all
// the shaders that are to be linked together. After calling shader.parse()
// for all shaders, call link().
@@ -725,7 +776,7 @@ public:
TProgram();
virtual ~TProgram();
void addShader(TShader* shader) { stages[shader->stage].push_back(shader); }
-
+ std::list<TShader*>& getShaders(EShLanguage stage) { return stages[stage]; }
// Link Validation interface
bool link(EShMessages);
const char* getInfoLog();
@@ -733,14 +784,15 @@ public:
TIntermediate* getIntermediate(EShLanguage stage) const { return intermediate[stage]; }
+#ifndef GLSLANG_WEB
+
// Reflection Interface
// call first, to do liveness analysis, index mapping, etc.; returns false on failure
- bool buildReflection(int opts = EShReflectionDefault);
-
+ bool buildReflection(int opts = EShReflectionDefault);
unsigned getLocalSize(int dim) const; // return dim'th local size
int getReflectionIndex(const char *name) const;
-
+ int getReflectionPipeIOIndex(const char* name, const bool inOrOut) const;
int getNumUniformVariables() const;
const TObjectReflection& getUniform(int index) const;
int getNumUniformBlocks() const;
@@ -770,6 +822,9 @@ public:
// can be used for glGetUniformIndices()
int getUniformIndex(const char *name) const { return getReflectionIndex(name); }
+ int getPipeIOIndex(const char *name, const bool inOrOut) const
+ { return getReflectionPipeIOIndex(name, inOrOut); }
+
// can be used for "name" part of glGetActiveUniform()
const char *getUniformName(int index) const { return getUniform(index).name.c_str(); }
@@ -819,11 +874,11 @@ public:
const TType *getAttributeTType(int index) const { return getPipeInput(index).getType(); }
void dumpReflection();
-
// I/O mapping: apply base offsets and map live unbound variables
// If resolver is not provided it uses the previous approach
// and respects auto assignment and offsets.
- bool mapIO(TIoMapResolver* resolver = NULL);
+ bool mapIO(TIoMapResolver* pResolver = nullptr, TIoMapper* pIoMapper = nullptr);
+#endif
protected:
bool linkStage(EShLanguage, EShMessages);
@@ -833,8 +888,9 @@ protected:
TIntermediate* intermediate[EShLangCount];
bool newedIntermediate[EShLangCount]; // track which intermediate were "new" versus reusing a singleton unit in a stage
TInfoSink* infoSink;
+#ifndef GLSLANG_WEB
TReflection* reflection;
- TIoMapper* ioMapper;
+#endif
bool linked;
private:
diff --git a/thirdparty/glslang/patches/fix-mingw-snprintf.patch b/thirdparty/glslang/patches/fix-mingw-snprintf.patch
new file mode 100644
index 0000000000..2a51bc1f22
--- /dev/null
+++ b/thirdparty/glslang/patches/fix-mingw-snprintf.patch
@@ -0,0 +1,15 @@
+diff --git a/thirdparty/glslang/glslang/Include/Common.h b/thirdparty/glslang/glslang/Include/Common.h
+index 733a790cfd..2c511bc1c5 100644
+--- a/thirdparty/glslang/glslang/Include/Common.h
++++ b/thirdparty/glslang/glslang/Include/Common.h
+@@ -50,7 +50,9 @@ std::string to_string(const T& val) {
+ }
+ #endif
+
+-#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) || defined MINGW_HAS_SECURE_API
++// -- GODOT start --
++#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) /* || defined MINGW_HAS_SECURE_API */
++// -- GODOT end --
+ #include <basetsd.h>
+ #ifndef snprintf
+ #define snprintf sprintf_s
diff --git a/thirdparty/mbedtls/include/mbedtls/version.h b/thirdparty/mbedtls/include/mbedtls/version.h
index aeffb16699..8e2ce03c32 100644
--- a/thirdparty/mbedtls/include/mbedtls/version.h
+++ b/thirdparty/mbedtls/include/mbedtls/version.h
@@ -40,16 +40,16 @@
*/
#define MBEDTLS_VERSION_MAJOR 2
#define MBEDTLS_VERSION_MINOR 16
-#define MBEDTLS_VERSION_PATCH 4
+#define MBEDTLS_VERSION_PATCH 5
/**
* The single version number has the following structure:
* MMNNPP00
* Major version | Minor version | Patch version
*/
-#define MBEDTLS_VERSION_NUMBER 0x02100400
-#define MBEDTLS_VERSION_STRING "2.16.4"
-#define MBEDTLS_VERSION_STRING_FULL "mbed TLS 2.16.4"
+#define MBEDTLS_VERSION_NUMBER 0x02100500
+#define MBEDTLS_VERSION_STRING "2.16.5"
+#define MBEDTLS_VERSION_STRING_FULL "mbed TLS 2.16.5"
#if defined(MBEDTLS_VERSION_C)
diff --git a/thirdparty/mbedtls/library/bignum.c b/thirdparty/mbedtls/library/bignum.c
index 6713bcbf6f..87ccf42fad 100644
--- a/thirdparty/mbedtls/library/bignum.c
+++ b/thirdparty/mbedtls/library/bignum.c
@@ -157,9 +157,10 @@ int mbedtls_mpi_shrink( mbedtls_mpi *X, size_t nblimbs )
if( nblimbs > MBEDTLS_MPI_MAX_LIMBS )
return( MBEDTLS_ERR_MPI_ALLOC_FAILED );
- /* Actually resize up in this case */
+ /* Actually resize up if there are currently fewer than nblimbs limbs. */
if( X->n <= nblimbs )
return( mbedtls_mpi_grow( X, nblimbs ) );
+ /* After this point, X->n > nblimbs and in particular X->n > 0. */
for( i = X->n - 1; i > 0; i-- )
if( X->p[i] != 0 )
@@ -198,7 +199,7 @@ int mbedtls_mpi_copy( mbedtls_mpi *X, const mbedtls_mpi *Y )
if( X == Y )
return( 0 );
- if( Y->p == NULL )
+ if( Y->n == 0 )
{
mbedtls_mpi_free( X );
return( 0 );
diff --git a/thirdparty/mbedtls/library/cipher.c b/thirdparty/mbedtls/library/cipher.c
index 273997577b..8d010b59ac 100644
--- a/thirdparty/mbedtls/library/cipher.c
+++ b/thirdparty/mbedtls/library/cipher.c
@@ -361,6 +361,10 @@ int mbedtls_cipher_update( mbedtls_cipher_context_t *ctx, const unsigned char *i
*olen = 0;
block_size = mbedtls_cipher_get_block_size( ctx );
+ if ( 0 == block_size )
+ {
+ return( MBEDTLS_ERR_CIPHER_INVALID_CONTEXT );
+ }
if( ctx->cipher_info->mode == MBEDTLS_MODE_ECB )
{
@@ -396,11 +400,6 @@ int mbedtls_cipher_update( mbedtls_cipher_context_t *ctx, const unsigned char *i
}
#endif
- if ( 0 == block_size )
- {
- return( MBEDTLS_ERR_CIPHER_INVALID_CONTEXT );
- }
-
if( input == output &&
( ctx->unprocessed_len != 0 || ilen % block_size ) )
{
@@ -459,11 +458,6 @@ int mbedtls_cipher_update( mbedtls_cipher_context_t *ctx, const unsigned char *i
*/
if( 0 != ilen )
{
- if( 0 == block_size )
- {
- return( MBEDTLS_ERR_CIPHER_INVALID_CONTEXT );
- }
-
/* Encryption: only cache partial blocks
* Decryption w/ padding: always keep at least one whole block
* Decryption w/o padding: only cache partial blocks
diff --git a/thirdparty/mbedtls/library/ecdsa.c b/thirdparty/mbedtls/library/ecdsa.c
index 3cf3d7cc4f..6b72e0d927 100644
--- a/thirdparty/mbedtls/library/ecdsa.c
+++ b/thirdparty/mbedtls/library/ecdsa.c
@@ -297,7 +297,7 @@ static int ecdsa_sign_restartable( mbedtls_ecp_group *grp,
*p_sign_tries = 0;
do
{
- if( *p_sign_tries++ > 10 )
+ if( (*p_sign_tries)++ > 10 )
{
ret = MBEDTLS_ERR_ECP_RANDOM_FAILED;
goto cleanup;
@@ -310,7 +310,7 @@ static int ecdsa_sign_restartable( mbedtls_ecp_group *grp,
*p_key_tries = 0;
do
{
- if( *p_key_tries++ > 10 )
+ if( (*p_key_tries)++ > 10 )
{
ret = MBEDTLS_ERR_ECP_RANDOM_FAILED;
goto cleanup;
diff --git a/thirdparty/mbedtls/library/pkparse.c b/thirdparty/mbedtls/library/pkparse.c
index ae210bca6a..d5004577a1 100644
--- a/thirdparty/mbedtls/library/pkparse.c
+++ b/thirdparty/mbedtls/library/pkparse.c
@@ -678,6 +678,32 @@ int mbedtls_pk_parse_subpubkey( unsigned char **p, const unsigned char *end,
#if defined(MBEDTLS_RSA_C)
/*
+ * Wrapper around mbedtls_asn1_get_mpi() that rejects zero.
+ *
+ * The value zero is:
+ * - never a valid value for an RSA parameter
+ * - interpreted as "omitted, please reconstruct" by mbedtls_rsa_complete().
+ *
+ * Since values can't be omitted in PKCS#1, passing a zero value to
+ * rsa_complete() would be incorrect, so reject zero values early.
+ */
+static int asn1_get_nonzero_mpi( unsigned char **p,
+ const unsigned char *end,
+ mbedtls_mpi *X )
+{
+ int ret;
+
+ ret = mbedtls_asn1_get_mpi( p, end, X );
+ if( ret != 0 )
+ return( ret );
+
+ if( mbedtls_mpi_cmp_int( X, 0 ) == 0 )
+ return( MBEDTLS_ERR_PK_KEY_INVALID_FORMAT );
+
+ return( 0 );
+}
+
+/*
* Parse a PKCS#1 encoded private RSA key
*/
static int pk_parse_key_pkcs1_der( mbedtls_rsa_context *rsa,
@@ -729,54 +755,84 @@ static int pk_parse_key_pkcs1_der( mbedtls_rsa_context *rsa,
}
/* Import N */
- if( ( ret = mbedtls_asn1_get_tag( &p, end, &len,
- MBEDTLS_ASN1_INTEGER ) ) != 0 ||
- ( ret = mbedtls_rsa_import_raw( rsa, p, len, NULL, 0, NULL, 0,
- NULL, 0, NULL, 0 ) ) != 0 )
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = mbedtls_rsa_import( rsa, &T, NULL, NULL,
+ NULL, NULL ) ) != 0 )
goto cleanup;
- p += len;
/* Import E */
- if( ( ret = mbedtls_asn1_get_tag( &p, end, &len,
- MBEDTLS_ASN1_INTEGER ) ) != 0 ||
- ( ret = mbedtls_rsa_import_raw( rsa, NULL, 0, NULL, 0, NULL, 0,
- NULL, 0, p, len ) ) != 0 )
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = mbedtls_rsa_import( rsa, NULL, NULL, NULL,
+ NULL, &T ) ) != 0 )
goto cleanup;
- p += len;
/* Import D */
- if( ( ret = mbedtls_asn1_get_tag( &p, end, &len,
- MBEDTLS_ASN1_INTEGER ) ) != 0 ||
- ( ret = mbedtls_rsa_import_raw( rsa, NULL, 0, NULL, 0, NULL, 0,
- p, len, NULL, 0 ) ) != 0 )
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = mbedtls_rsa_import( rsa, NULL, NULL, NULL,
+ &T, NULL ) ) != 0 )
goto cleanup;
- p += len;
/* Import P */
- if( ( ret = mbedtls_asn1_get_tag( &p, end, &len,
- MBEDTLS_ASN1_INTEGER ) ) != 0 ||
- ( ret = mbedtls_rsa_import_raw( rsa, NULL, 0, p, len, NULL, 0,
- NULL, 0, NULL, 0 ) ) != 0 )
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = mbedtls_rsa_import( rsa, NULL, &T, NULL,
+ NULL, NULL ) ) != 0 )
goto cleanup;
- p += len;
/* Import Q */
- if( ( ret = mbedtls_asn1_get_tag( &p, end, &len,
- MBEDTLS_ASN1_INTEGER ) ) != 0 ||
- ( ret = mbedtls_rsa_import_raw( rsa, NULL, 0, NULL, 0, p, len,
- NULL, 0, NULL, 0 ) ) != 0 )
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = mbedtls_rsa_import( rsa, NULL, NULL, &T,
+ NULL, NULL ) ) != 0 )
goto cleanup;
- p += len;
- /* Complete the RSA private key */
- if( ( ret = mbedtls_rsa_complete( rsa ) ) != 0 )
- goto cleanup;
+#if !defined(MBEDTLS_RSA_NO_CRT) && !defined(MBEDTLS_RSA_ALT)
+ /*
+ * The RSA CRT parameters DP, DQ and QP are nominally redundant, in
+ * that they can be easily recomputed from D, P and Q. However by
+ * parsing them from the PKCS#1 structure it is possible to avoid
+ * recalculating them which both reduces the overhead of loading
+ * RSA private keys into memory and also avoids side channels which
+ * can arise when computing those values, since all of D, P, and Q
+ * are secret. See https://eprint.iacr.org/2020/055 for a
+ * description of one such attack.
+ */
+
+ /* Import DP */
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = mbedtls_mpi_copy( &rsa->DP, &T ) ) != 0 )
+ goto cleanup;
+
+ /* Import DQ */
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = mbedtls_mpi_copy( &rsa->DQ, &T ) ) != 0 )
+ goto cleanup;
+
+ /* Import QP */
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = mbedtls_mpi_copy( &rsa->QP, &T ) ) != 0 )
+ goto cleanup;
+
+#else
+ /* Verify existence of the CRT params */
+ if( ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 ||
+ ( ret = asn1_get_nonzero_mpi( &p, end, &T ) ) != 0 )
+ goto cleanup;
+#endif
- /* Check optional parameters */
- if( ( ret = mbedtls_asn1_get_mpi( &p, end, &T ) ) != 0 ||
- ( ret = mbedtls_asn1_get_mpi( &p, end, &T ) ) != 0 ||
- ( ret = mbedtls_asn1_get_mpi( &p, end, &T ) ) != 0 )
+ /* rsa_complete() doesn't complete anything with the default
+ * implementation but is still called:
+ * - for the benefit of alternative implementations that may want to
+ * pre-compute stuff beyond what's provided (e.g. Montgomery factors)
+ * - as it also sanity-checks the key
+ *
+ * Furthermore, we also check the public part for consistency with
+ * mbedtls_pk_parse_pubkey(), as it includes size minima for example.
+ */
+ if( ( ret = mbedtls_rsa_complete( rsa ) ) != 0 ||
+ ( ret = mbedtls_rsa_check_pubkey( rsa ) ) != 0 )
+ {
goto cleanup;
+ }
if( p != end )
{
diff --git a/thirdparty/mbedtls/library/rsa.c b/thirdparty/mbedtls/library/rsa.c
index af1a878599..09fd379fdb 100644
--- a/thirdparty/mbedtls/library/rsa.c
+++ b/thirdparty/mbedtls/library/rsa.c
@@ -249,6 +249,9 @@ int mbedtls_rsa_complete( mbedtls_rsa_context *ctx )
{
int ret = 0;
int have_N, have_P, have_Q, have_D, have_E;
+#if !defined(MBEDTLS_RSA_NO_CRT)
+ int have_DP, have_DQ, have_QP;
+#endif
int n_missing, pq_missing, d_missing, is_pub, is_priv;
RSA_VALIDATE_RET( ctx != NULL );
@@ -259,6 +262,12 @@ int mbedtls_rsa_complete( mbedtls_rsa_context *ctx )
have_D = ( mbedtls_mpi_cmp_int( &ctx->D, 0 ) != 0 );
have_E = ( mbedtls_mpi_cmp_int( &ctx->E, 0 ) != 0 );
+#if !defined(MBEDTLS_RSA_NO_CRT)
+ have_DP = ( mbedtls_mpi_cmp_int( &ctx->DP, 0 ) != 0 );
+ have_DQ = ( mbedtls_mpi_cmp_int( &ctx->DQ, 0 ) != 0 );
+ have_QP = ( mbedtls_mpi_cmp_int( &ctx->QP, 0 ) != 0 );
+#endif
+
/*
* Check whether provided parameters are enough
* to deduce all others. The following incomplete
@@ -324,7 +333,7 @@ int mbedtls_rsa_complete( mbedtls_rsa_context *ctx )
*/
#if !defined(MBEDTLS_RSA_NO_CRT)
- if( is_priv )
+ if( is_priv && ! ( have_DP && have_DQ && have_QP ) )
{
ret = mbedtls_rsa_deduce_crt( &ctx->P, &ctx->Q, &ctx->D,
&ctx->DP, &ctx->DQ, &ctx->QP );
diff --git a/thirdparty/mbedtls/library/x509write_csr.c b/thirdparty/mbedtls/library/x509write_csr.c
index b65a11c6aa..7406a97542 100644
--- a/thirdparty/mbedtls/library/x509write_csr.c
+++ b/thirdparty/mbedtls/library/x509write_csr.c
@@ -226,7 +226,9 @@ int mbedtls_x509write_csr_der( mbedtls_x509write_csr *ctx, unsigned char *buf, s
/*
* Prepare signature
*/
- mbedtls_md( mbedtls_md_info_from_type( ctx->md_alg ), c, len, hash );
+ ret = mbedtls_md( mbedtls_md_info_from_type( ctx->md_alg ), c, len, hash );
+ if( ret != 0 )
+ return( ret );
if( ( ret = mbedtls_pk_sign( ctx->key, ctx->md_alg, hash, 0, sig, &sig_len,
f_rng, p_rng ) ) != 0 )
diff --git a/thirdparty/miniupnpc/miniupnpc/minissdpc.c b/thirdparty/miniupnpc/miniupnpc/minissdpc.c
index ea9af02e1f..36244dedec 100644
--- a/thirdparty/miniupnpc/miniupnpc/minissdpc.c
+++ b/thirdparty/miniupnpc/miniupnpc/minissdpc.c
@@ -62,13 +62,13 @@ struct sockaddr_un {
#include "miniupnpc_socketdef.h"
-#if !defined(__DragonFly__) && !defined(__OpenBSD__) && !defined(__NetBSD__) && !defined(__APPLE__) && !defined(_WIN32) && !defined(__CYGWIN__) && !defined(__sun) && !defined(__GNU__) && !defined(__FreeBSD_kernel__)
+#if !defined(__DragonFly__) && !defined(__OpenBSD__) && !defined(__NetBSD__) && !defined(__APPLE__) && !defined(_WIN32) && !defined(__CYGWIN__) && !defined(__sun) && !defined(__GNU__) && !defined(__FreeBSD_kernel__) && !defined(__HAIKU__)
#define HAS_IP_MREQN
#endif
#if !defined(HAS_IP_MREQN) && !defined(_WIN32)
#include <sys/ioctl.h>
-#if defined(__sun)
+#if defined(__sun) || defined(__HAIKU__)
#include <sys/sockio.h>
#endif
#endif
diff --git a/thirdparty/miniupnpc/miniupnpc/portlistingparse.c b/thirdparty/miniupnpc/miniupnpc/portlistingparse.c
index 55859f2714..18d967b877 100644
--- a/thirdparty/miniupnpc/miniupnpc/portlistingparse.c
+++ b/thirdparty/miniupnpc/miniupnpc/portlistingparse.c
@@ -1,7 +1,7 @@
/* $Id: portlistingparse.c,v 1.9 2015/07/15 12:41:13 nanard Exp $ */
/* MiniUPnP project
* http://miniupnp.free.fr/ or http://miniupnp.tuxfamily.org/
- * (c) 2011-2016 Thomas Bernard
+ * (c) 2011-2020 Thomas Bernard
* This software is subject to the conditions detailed
* in the LICENCE file provided within the distribution */
#include <string.h>
@@ -12,6 +12,11 @@
#include "portlistingparse.h"
#include "minixml.h"
+#if defined(__HAIKU__)
+/* rename our private function because Haiku already defines a atoui() function */
+#define atoui atoui2
+#endif
+
/* list of the elements */
static const struct {
const portMappingElt code;
diff --git a/thirdparty/miniupnpc/miniupnpc/upnpc.c b/thirdparty/miniupnpc/miniupnpc/upnpc.c
index 4325658bee..cb7f18b5f6 100644
--- a/thirdparty/miniupnpc/miniupnpc/upnpc.c
+++ b/thirdparty/miniupnpc/miniupnpc/upnpc.c
@@ -1,7 +1,7 @@
/* $Id: upnpc.c,v 1.119 2018/03/13 23:34:46 nanard Exp $ */
/* Project : miniupnp
* Author : Thomas Bernard
- * Copyright (c) 2005-2019 Thomas Bernard
+ * Copyright (c) 2005-2020 Thomas Bernard
* This software is subject to the conditions detailed in the
* LICENCE file provided in this distribution. */
@@ -580,7 +580,7 @@ int main(int argc, char ** argv)
}
#endif
printf("upnpc : miniupnpc library test client, version %s.\n", MINIUPNPC_VERSION_STRING);
- printf(" (c) 2005-2019 Thomas Bernard.\n");
+ printf(" (c) 2005-2020 Thomas Bernard.\n");
printf("Go to http://miniupnp.free.fr/ or https://miniupnp.tuxfamily.org/\n"
"for more information.\n");
/* command line processing */
diff --git a/thirdparty/vulkan/include/vulkan/vulkan.hpp b/thirdparty/vulkan/include/vulkan/vulkan.hpp
index c56dc796f0..441869898f 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan.hpp
+++ b/thirdparty/vulkan/include/vulkan/vulkan.hpp
@@ -1,4 +1,4 @@
-// Copyright (c) 2015-2019 The Khronos Group Inc.
+// Copyright (c) 2015-2020 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -56,11 +56,21 @@
# define VULKAN_HPP_ASSERT assert
#endif
-#if defined(__linux__) || defined(__APPLE__)
-# include <dlfcn.h>
+#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL)
+# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
+#endif
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
+# if defined(__linux__) || defined(__APPLE__)
+# include <dlfcn.h>
+# endif
+
+# if defined(_WIN32)
+# include <windows.h>
+# endif
#endif
-static_assert( VK_HEADER_VERSION == 127 , "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 131 , "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
@@ -81,7 +91,7 @@ static_assert( VK_HEADER_VERSION == 127 , "Wrong VK_HEADER_VERSION!" );
#endif
// Windows defines MemoryBarrier which is deprecated and collides
-// with the vk::MemoryBarrier struct.
+// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
#if defined(MemoryBarrier)
#undef MemoryBarrier
#endif
@@ -259,6 +269,7 @@ namespace VULKAN_HPP_NAMESPACE
class Flags
{
public:
+ // constructors
VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT
: m_mask(0)
{}
@@ -267,7 +278,7 @@ namespace VULKAN_HPP_NAMESPACE
: m_mask(static_cast<MaskType>(bit))
{}
- VULKAN_HPP_CONSTEXPR Flags(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
: m_mask(rhs.m_mask)
{}
@@ -275,65 +286,90 @@ namespace VULKAN_HPP_NAMESPACE
: m_mask(flags)
{}
- Flags<BitType> & operator=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+ // relational operators
+ VULKAN_HPP_CONSTEXPR bool operator<(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- m_mask = rhs.m_mask;
- return *this;
+ return m_mask < rhs.m_mask;
}
- Flags<BitType> & operator|=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator<=(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- m_mask |= rhs.m_mask;
- return *this;
+ return m_mask <= rhs.m_mask;
}
- Flags<BitType> & operator&=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator>(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- m_mask &= rhs.m_mask;
- return *this;
+ return m_mask > rhs.m_mask;
}
- Flags<BitType> & operator^=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator>=(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- m_mask ^= rhs.m_mask;
- return *this;
+ return m_mask >= rhs.m_mask;
}
- VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- return Flags<BitType>(m_mask | rhs.m_mask);
+ return m_mask == rhs.m_mask;
}
- VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- return Flags<BitType>(m_mask & rhs.m_mask);
+ return m_mask != rhs.m_mask;
}
- VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ // logical operator
+ VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
{
- return Flags<BitType>(m_mask ^ rhs.m_mask);
+ return !m_mask;
}
- VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
+ // bitwise operators
+ VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator&(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- return !m_mask;
+ return Flags<BitType, MaskType>(m_mask & rhs.m_mask);
}
- VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator|(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- return Flags<BitType>(m_mask ^ FlagTraits<BitType>::allFlags);
+ return Flags<BitType, MaskType>(m_mask | rhs.m_mask);
}
- VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator^(Flags<BitType, MaskType> const& rhs) const VULKAN_HPP_NOEXCEPT
{
- return m_mask == rhs.m_mask;
+ return Flags<BitType, MaskType>(m_mask ^ rhs.m_mask);
}
- VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator~() const VULKAN_HPP_NOEXCEPT
{
- return m_mask != rhs.m_mask;
+ return Flags<BitType, MaskType>(m_mask ^ FlagTraits<BitType>::allFlags);
+ }
+
+ // assignment operators
+ Flags<BitType, MaskType> & operator=(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ m_mask = rhs.m_mask;
+ return *this;
+ }
+
+ Flags<BitType, MaskType> & operator|=(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ m_mask |= rhs.m_mask;
+ return *this;
+ }
+
+ Flags<BitType, MaskType> & operator&=(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ m_mask &= rhs.m_mask;
+ return *this;
}
+ Flags<BitType, MaskType> & operator^=(Flags<BitType, MaskType> const& rhs) VULKAN_HPP_NOEXCEPT
+ {
+ m_mask ^= rhs.m_mask;
+ return *this;
+ }
+
+ // cast operators
explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
{
return !!m_mask;
@@ -348,36 +384,62 @@ namespace VULKAN_HPP_NAMESPACE
MaskType m_mask;
};
- template <typename BitType>
- VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+ // relational operators
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
{
- return flags | bit;
+ return flags > bit;
}
- template <typename BitType>
- VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
{
- return flags & bit;
+ return flags >= bit;
}
- template <typename BitType>
- VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
{
- return flags ^ bit;
+ return flags < bit;
}
- template <typename BitType>
- VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ {
+ return flags <= bit;
+ }
+
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags == bit;
}
- template <typename BitType>
- VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
{
return flags != bit;
}
+ // bitwise operators
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator&(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ {
+ return flags & bit;
+ }
+
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator|(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ {
+ return flags | bit;
+ }
+
+ template <typename BitType, typename MaskType = VkFlags>
+ VULKAN_HPP_CONSTEXPR Flags<BitType, MaskType> operator^(BitType bit, Flags<BitType, MaskType> const& flags) VULKAN_HPP_NOEXCEPT
+ {
+ return flags ^ bit;
+ }
+
template <typename RefType>
class Optional
{
@@ -421,6 +483,18 @@ namespace VULKAN_HPP_NAMESPACE
static const bool valid = true;
};
+ template<typename Type, class...>
+ struct isPartOfStructureChain
+ {
+ static const bool valid = false;
+ };
+
+ template<typename Type, typename Head, typename... Tail>
+ struct isPartOfStructureChain<Type, Head, Tail...>
+ {
+ static const bool valid = std::is_same<Type, Head>::value || isPartOfStructureChain<Type, Tail...>::valid;
+ };
+
template <class Element>
class StructureChainElement
{
@@ -467,6 +541,45 @@ namespace VULKAN_HPP_NAMESPACE
);
}
+ template<typename ClassType>
+ void unlink() VULKAN_HPP_NOEXCEPT
+ {
+ static_assert(isPartOfStructureChain<ClassType, StructureElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!");
+ static_assert(!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<StructureElements...>>::type>::value, "It's not allowed to unlink the first element!");
+ VkBaseOutStructure * ptr = reinterpret_cast<VkBaseOutStructure*>(&get<ClassType>());
+ assert(ptr != nullptr);
+ VkBaseOutStructure ** ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
+ assert(*ppNext != nullptr);
+ while (*ppNext != ptr)
+ {
+ ppNext = &(*ppNext)->pNext;
+ assert(*ppNext != nullptr); // fires if the ClassType member has already been unlinked!
+ }
+ assert(*ppNext == ptr);
+ *ppNext = (*ppNext)->pNext;
+ }
+
+ template <typename ClassType>
+ void relink() VULKAN_HPP_NOEXCEPT
+ {
+ static_assert(isPartOfStructureChain<ClassType, StructureElements...>::valid, "Can't relink Structure that's not part of this StructureChain!");
+ static_assert(!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<StructureElements...>>::type>::value, "It's not allowed to have the first element unlinked!");
+ VkBaseOutStructure * ptr = reinterpret_cast<VkBaseOutStructure*>(&get<ClassType>());
+ assert(ptr != nullptr);
+ VkBaseOutStructure ** ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
+ assert(*ppNext != nullptr);
+#if !defined(NDEBUG)
+ while (*ppNext)
+ {
+ assert(*ppNext != ptr); // fires if the ClassType member has not been unlinked before
+ ppNext = &(*ppNext)->pNext;
+ }
+ ppNext = &(reinterpret_cast<VkBaseOutStructure*>(this)->pNext);
+#endif
+ ptr->pNext = *ppNext;
+ *ppNext = ptr;
+ }
+
private:
template<typename List, typename X>
void link() VULKAN_HPP_NOEXCEPT
@@ -533,7 +646,12 @@ namespace VULKAN_HPP_NAMESPACE
public:
using element_type = Type;
- explicit UniqueHandle( Type const& value = Type(), Deleter const& deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
+ UniqueHandle()
+ : Deleter()
+ , m_value()
+ {}
+
+ explicit UniqueHandle( Type const& value, Deleter const& deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
: Deleter( deleter)
, m_value( value )
{}
@@ -689,7 +807,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
}
- void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+ }
+
+ void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
}
@@ -834,6 +957,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
}
+ void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ }
+
void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
@@ -854,6 +982,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
}
+ void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+ }
+
void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
@@ -904,7 +1037,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdEndRenderPass( commandBuffer );
}
- void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
+ }
+
+ void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
}
@@ -934,7 +1072,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCmdNextSubpass( commandBuffer, contents );
}
- void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+ }
+
+ void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
}
@@ -1161,6 +1304,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
}
+ VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkAcquireProfilingLockKHR( device, pInfo );
+ }
+
VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
{
return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
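vkAcquireProfilingLockKHR pairs with the vkReleaseProfilingLockKHR wrapper added further down; a sketch of the VK_KHR_performance_query locking protocol (the extension is assumed to be enabled on the device):

    #include <vulkan/vulkan.h>

    VkResult lockForProfiling( VkDevice device )
    {
      VkAcquireProfilingLockInfoKHR lockInfo = {};
      lockInfo.sType   = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
      lockInfo.timeout = UINT64_MAX;  // block until the lock is granted
      return vkAcquireProfilingLockKHR( device, &lockInfo );
      // balance with vkReleaseProfilingLockKHR( device ) once profiling ends
    }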
@@ -1326,7 +1474,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
}
- VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass );
+ }
+
+ VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
}
@@ -1568,11 +1721,21 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo ) const VULKAN_HPP_NOEXCEPT
+ VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetBufferDeviceAddress( device, pInfo );
+ }
+
+ VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferDeviceAddressEXT( device, pInfo );
}
+ VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetBufferDeviceAddressKHR( device, pInfo );
+ }
+
void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
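All three address queries share the core VkBufferDeviceAddressInfo struct; a sketch using the core 1.2 spelling (the bufferDeviceAddress feature is assumed to have been enabled at device creation):

    #include <vulkan/vulkan.h>

    VkDeviceAddress queryAddress( VkDevice device, VkBuffer buffer )
    {
      VkBufferDeviceAddressInfo info = {};
      info.sType  = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
      info.buffer = buffer;
      return vkGetBufferDeviceAddress( device, &info );  // EXT/KHR variants take the same struct
    }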
@@ -1588,6 +1751,16 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
}
+ uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetBufferOpaqueCaptureAddress( device, pInfo );
+ }
+
+ uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo );
+ }
+
VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
@@ -1635,6 +1808,16 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
}
+ uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo );
+ }
+
+ uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo );
+ }
+
PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceProcAddr( device, pName );
@@ -1810,6 +1993,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
}
+ VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
+ }
+
VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
@@ -1928,6 +2116,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
}
+ void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkReleaseProfilingLockKHR( device );
+ }
+
VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetCommandPool( device, commandPool, flags );
@@ -1948,6 +2141,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkResetFences( device, fenceCount, pFences );
}
+ void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
+ }
+
void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
@@ -1978,7 +2176,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
}
- VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkSignalSemaphore( device, pSignalInfo );
+ }
+
+ VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkSignalSemaphoreKHR( device, pSignalInfo );
}
@@ -2028,7 +2231,12 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
}
- VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkWaitSemaphores( device, pWaitInfo, timeout );
+ }
+
+ VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
{
return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
}
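The promoted wait/signal pair operates on timeline semaphores; a sketch of a host-side wait (the semaphore is assumed to have been created with VK_SEMAPHORE_TYPE_TIMELINE):

    #include <vulkan/vulkan.h>

    VkResult waitForTimelineValue( VkDevice device, VkSemaphore semaphore, uint64_t value )
    {
      VkSemaphoreWaitInfo waitInfo = {};
      waitInfo.sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
      waitInfo.semaphoreCount = 1;
      waitInfo.pSemaphores    = &semaphore;
      waitInfo.pValues        = &value;
      return vkWaitSemaphores( device, &waitInfo, UINT64_MAX );
    }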
@@ -2207,6 +2415,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
}
+ VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
+ }
+
VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
@@ -2387,6 +2600,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
}
+ void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
+ }
+
void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
@@ -2464,6 +2682,11 @@ namespace VULKAN_HPP_NAMESPACE
return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
}
+ VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
+ }
+
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT
{
@@ -2562,20 +2785,20 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER)
# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
-# define VULKAN_HPP_DEFAULT_DISPATCHER ::vk::defaultDispatchLoaderDynamic
-# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace vk { DispatchLoaderDynamic defaultDispatchLoaderDynamic; }
+# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
+# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { DispatchLoaderDynamic defaultDispatchLoaderDynamic; }
extern DispatchLoaderDynamic defaultDispatchLoaderDynamic;
# else
-# define VULKAN_HPP_DEFAULT_DISPATCHER ::vk::DispatchLoaderStatic()
+# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic()
# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
# endif
#endif
#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER_TYPE)
# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
- #define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::vk::DispatchLoaderDynamic
+ #define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
# else
-# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::vk::DispatchLoaderStatic
+# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
# endif
#endif
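When the dynamic dispatcher is selected, the storage macro must be expanded in exactly one translation unit; a sketch, assuming the init(PFN_vkGetInstanceProcAddr) and init(Instance) overloads of this header revision:

    #define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
    #include <vulkan/vulkan.hpp>

    VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE  // defines defaultDispatchLoaderDynamic, one TU only

    void initDispatcher( PFN_vkGetInstanceProcAddr gipa, vk::Instance instance )
    {
      VULKAN_HPP_DEFAULT_DISPATCHER.init( gipa );      // resolve global entry points
      VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );  // resolve instance-level entry points
    }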
@@ -2585,7 +2808,13 @@ namespace VULKAN_HPP_NAMESPACE
class ObjectDestroy
{
public:
- ObjectDestroy( OwnerType owner = OwnerType(), Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+ ObjectDestroy()
+ : m_owner()
+ , m_allocationCallbacks( nullptr )
+ , m_dispatch( nullptr )
+ {}
+
+ ObjectDestroy( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
: m_owner( owner )
, m_allocationCallbacks( allocationCallbacks )
, m_dispatch( &dispatch )
@@ -2598,6 +2827,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T>
void destroy(T t) VULKAN_HPP_NOEXCEPT
{
+ assert( m_owner && m_dispatch );
m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
}
@@ -2613,7 +2843,12 @@ namespace VULKAN_HPP_NAMESPACE
class ObjectDestroy<NoParent,Dispatch>
{
public:
- ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+ ObjectDestroy()
+ : m_allocationCallbacks( nullptr )
+ , m_dispatch( nullptr )
+ {}
+
+ ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
: m_allocationCallbacks( allocationCallbacks )
, m_dispatch( &dispatch )
{}
@@ -2624,6 +2859,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T>
void destroy(T t) VULKAN_HPP_NOEXCEPT
{
+ assert( m_dispatch );
t.destroy( m_allocationCallbacks, *m_dispatch );
}
@@ -2636,7 +2872,13 @@ namespace VULKAN_HPP_NAMESPACE
class ObjectFree
{
public:
- ObjectFree( OwnerType owner = OwnerType(), Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+ ObjectFree()
+ : m_owner()
+ , m_allocationCallbacks( nullptr )
+ , m_dispatch( nullptr )
+ {}
+
+ ObjectFree( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch ) VULKAN_HPP_NOEXCEPT
: m_owner( owner )
, m_allocationCallbacks( allocationCallbacks )
, m_dispatch( &dispatch )
@@ -2649,6 +2891,7 @@ namespace VULKAN_HPP_NAMESPACE
template <typename T>
void destroy(T t) VULKAN_HPP_NOEXCEPT
{
+ assert( m_owner && m_dispatch );
m_owner.free( t, m_allocationCallbacks, *m_dispatch );
}
@@ -2685,23 +2928,51 @@ namespace VULKAN_HPP_NAMESPACE
};
template <typename T, size_t N, size_t I>
- class ConstExpressionArrayCopy
+ class ConstExpression1DArrayCopy
{
public:
VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N], std::array<T,N> const& src) VULKAN_HPP_NOEXCEPT
{
dst[I-1] = src[I-1];
- ConstExpressionArrayCopy<T, N, I - 1>::copy(dst, src);
+ ConstExpression1DArrayCopy<T, N, I - 1>::copy(dst, src);
}
};
template <typename T, size_t N>
- class ConstExpressionArrayCopy<T, N, 0>
+ class ConstExpression1DArrayCopy<T, N, 0>
{
public:
VULKAN_HPP_CONSTEXPR_14 static void copy(T /*dst*/[N], std::array<T,N> const& /*src*/) VULKAN_HPP_NOEXCEPT {}
};
+ template <typename T, size_t N, size_t M, size_t I, size_t J>
+ class ConstExpression2DArrayCopy
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N][M], std::array<std::array<T,M>, N> const& src) VULKAN_HPP_NOEXCEPT
+ {
+ dst[I - 1][J - 1] = src[I - 1][J - 1];
+ ConstExpression2DArrayCopy<T, N, M, I, J - 1>::copy(dst, src);
+ }
+ };
+
+ template <typename T, size_t N, size_t M, size_t I>
+ class ConstExpression2DArrayCopy<T, N, M, I, 0>
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR_14 static void copy(T dst[N][M], std::array<std::array<T, M>, N> const& src) VULKAN_HPP_NOEXCEPT
+ {
+ ConstExpression2DArrayCopy<T, N, M, I - 1, M>::copy(dst, src);
+ }
+ };
+
+ template <typename T, size_t N, size_t M, size_t J>
+ class ConstExpression2DArrayCopy<T, N, M, 0, J> // terminator must match <0, M>, reached from the <I, 0> row step; a <0, 0>-only terminator would send <0, M> back into the primary template and index dst[I - 1] with I == 0
+ {
+ public:
+ VULKAN_HPP_CONSTEXPR_14 static void copy(T /*dst*/[N][M], std::array<std::array<T, M>, N> const& /*src*/) VULKAN_HPP_NOEXCEPT {}
+ };
+
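A sketch of how the unrolled copy is driven: the entry instantiation uses I = N and J = M and the recursion walks backwards through every element (assumes it is invoked from inside VULKAN_HPP_NAMESPACE; the values are illustrative):

    #include <array>

    void copy2DExample()
    {
      float dst[2][3];
      std::array<std::array<float, 3>, 2> src = { { { 1.f, 2.f, 3.f },
                                                    { 4.f, 5.f, 6.f } } };
      ConstExpression2DArrayCopy<float, 2, 3, 2, 3>::copy( dst, src );  // dst now equals src element-wise
    }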
using Bool32 = uint32_t;
using DeviceAddress = uint64_t;
using DeviceSize = uint64_t;
@@ -2741,6 +3012,96 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class AccessFlagBits
+ {
+ eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
+ eIndexRead = VK_ACCESS_INDEX_READ_BIT,
+ eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
+ eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
+ eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
+ eShaderRead = VK_ACCESS_SHADER_READ_BIT,
+ eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
+ eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
+ eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+ eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+ eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
+ eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
+ eHostRead = VK_ACCESS_HOST_READ_BIT,
+ eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
+ eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
+ eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+ eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+ eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+ eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+ eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
+ eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
+ eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX,
+ eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+ eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
+ eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
+ eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+ eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
+ {
+ switch ( value )
+ {
+ case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead";
+ case AccessFlagBits::eIndexRead : return "IndexRead";
+ case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead";
+ case AccessFlagBits::eUniformRead : return "UniformRead";
+ case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead";
+ case AccessFlagBits::eShaderRead : return "ShaderRead";
+ case AccessFlagBits::eShaderWrite : return "ShaderWrite";
+ case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead";
+ case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite";
+ case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead";
+ case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite";
+ case AccessFlagBits::eTransferRead : return "TransferRead";
+ case AccessFlagBits::eTransferWrite : return "TransferWrite";
+ case AccessFlagBits::eHostRead : return "HostRead";
+ case AccessFlagBits::eHostWrite : return "HostWrite";
+ case AccessFlagBits::eMemoryRead : return "MemoryRead";
+ case AccessFlagBits::eMemoryWrite : return "MemoryWrite";
+ case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT";
+ case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT";
+ case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT";
+ case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT";
+ case AccessFlagBits::eCommandProcessReadNVX : return "CommandProcessReadNVX";
+ case AccessFlagBits::eCommandProcessWriteNVX : return "CommandProcessWriteNVX";
+ case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT";
+ case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV";
+ case AccessFlagBits::eAccelerationStructureReadNV : return "AccelerationStructureReadNV";
+ case AccessFlagBits::eAccelerationStructureWriteNV : return "AccelerationStructureWriteNV";
+ case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
+ default: return "invalid";
+ }
+ }
+
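The to_string overload above handles single bits only; a combined mask goes through the Flags-based overload generated later in this header. A sketch:

    #include <vulkan/vulkan.hpp>

    std::string describeAccess()
    {
      vk::AccessFlags mask = vk::AccessFlagBits::eShaderRead | vk::AccessFlagBits::eShaderWrite;
      return vk::to_string( mask );  // Flags overload; a lone bit uses the switch above
    }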
+ enum class AcquireProfilingLockFlagBitsKHR
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
+ {
+ return "(void)";
+ }
+
+ enum class AttachmentDescriptionFlagBits
+ {
+ eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
+ {
+ switch ( value )
+ {
+ case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias";
+ default: return "invalid";
+ }
+ }
+
enum class AttachmentLoadOp
{
eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
@@ -2981,13 +3342,108 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class BufferCreateFlagBits
+ {
+ eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+ eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+ eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
+ eProtected = VK_BUFFER_CREATE_PROTECTED_BIT,
+ eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+ eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT,
+ eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case BufferCreateFlagBits::eSparseBinding : return "SparseBinding";
+ case BufferCreateFlagBits::eSparseResidency : return "SparseResidency";
+ case BufferCreateFlagBits::eSparseAliased : return "SparseAliased";
+ case BufferCreateFlagBits::eProtected : return "Protected";
+ case BufferCreateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
+ default: return "invalid";
+ }
+ }
+
+ enum class BufferUsageFlagBits
+ {
+ eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+ eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+ eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+ eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+ eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+ eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+ eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+ eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+ eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
+ eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+ eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+ eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+ eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+ eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
+ eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
+ eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
+ {
+ switch ( value )
+ {
+ case BufferUsageFlagBits::eTransferSrc : return "TransferSrc";
+ case BufferUsageFlagBits::eTransferDst : return "TransferDst";
+ case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
+ case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
+ case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer";
+ case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer";
+ case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer";
+ case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer";
+ case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer";
+ case BufferUsageFlagBits::eShaderDeviceAddress : return "ShaderDeviceAddress";
+ case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT";
+ case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT";
+ case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+ case BufferUsageFlagBits::eRayTracingNV : return "RayTracingNV";
+ default: return "invalid";
+ }
+ }
+
+ enum class BufferViewCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class BuildAccelerationStructureFlagBitsNV
+ {
+ eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV,
+ eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV,
+ ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV,
+ ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV,
+ eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case BuildAccelerationStructureFlagBitsNV::eAllowUpdate : return "AllowUpdate";
+ case BuildAccelerationStructureFlagBitsNV::eAllowCompaction : return "AllowCompaction";
+ case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace : return "PreferFastTrace";
+ case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild : return "PreferFastBuild";
+ case BuildAccelerationStructureFlagBitsNV::eLowMemory : return "LowMemory";
+ default: return "invalid";
+ }
+ }
+
enum class ChromaLocation
{
eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
- eMidpoint = VK_CHROMA_LOCATION_MIDPOINT,
- eCositedEvenKHR = VK_CHROMA_LOCATION_COSITED_EVEN_KHR,
- eMidpointKHR = VK_CHROMA_LOCATION_MIDPOINT_KHR
+ eMidpoint = VK_CHROMA_LOCATION_MIDPOINT
};
+ using ChromaLocationKHR = ChromaLocation;
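With the KHR enumerators gone, code written against VK_KHR_sampler_ycbcr_conversion keeps compiling through the alias:

    #include <vulkan/vulkan.hpp>

    vk::ChromaLocationKHR loc = vk::ChromaLocation::eMidpoint;  // one type, two spellings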
VULKAN_HPP_INLINE std::string to_string( ChromaLocation value )
{
@@ -3019,6 +3475,26 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class ColorComponentFlagBits
+ {
+ eR = VK_COLOR_COMPONENT_R_BIT,
+ eG = VK_COLOR_COMPONENT_G_BIT,
+ eB = VK_COLOR_COMPONENT_B_BIT,
+ eA = VK_COLOR_COMPONENT_A_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
+ {
+ switch ( value )
+ {
+ case ColorComponentFlagBits::eR : return "R";
+ case ColorComponentFlagBits::eG : return "G";
+ case ColorComponentFlagBits::eB : return "B";
+ case ColorComponentFlagBits::eA : return "A";
+ default: return "invalid";
+ }
+ }
+
enum class ColorSpaceKHR
{
eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
@@ -3081,6 +3557,70 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class CommandBufferResetFlagBits
+ {
+ eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
+ {
+ switch ( value )
+ {
+ case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources";
+ default: return "invalid";
+ }
+ }
+
+ enum class CommandBufferUsageFlagBits
+ {
+ eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+ eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+ eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
+ {
+ switch ( value )
+ {
+ case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit";
+ case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue";
+ case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse";
+ default: return "invalid";
+ }
+ }
+
+ enum class CommandPoolCreateFlagBits
+ {
+ eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
+ eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
+ eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case CommandPoolCreateFlagBits::eTransient : return "Transient";
+ case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer";
+ case CommandPoolCreateFlagBits::eProtected : return "Protected";
+ default: return "invalid";
+ }
+ }
+
+ enum class CommandPoolResetFlagBits
+ {
+ eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
+ {
+ switch ( value )
+ {
+ case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources";
+ default: return "invalid";
+ }
+ }
+
enum class CompareOp
{
eNever = VK_COMPARE_OP_NEVER,
@@ -3169,6 +3709,40 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class CompositeAlphaFlagBitsKHR
+ {
+ eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+ ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+ ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+ eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque";
+ case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied";
+ case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied";
+ case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit";
+ default: return "invalid";
+ }
+ }
+
+ enum class ConditionalRenderingFlagBitsEXT
+ {
+ eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted";
+ default: return "invalid";
+ }
+ }
+
enum class ConservativeRasterizationModeEXT
{
eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
@@ -3239,6 +3813,48 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class CullModeFlagBits
+ {
+ eNone = VK_CULL_MODE_NONE,
+ eFront = VK_CULL_MODE_FRONT_BIT,
+ eBack = VK_CULL_MODE_BACK_BIT,
+ eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
+ {
+ switch ( value )
+ {
+ case CullModeFlagBits::eNone : return "None";
+ case CullModeFlagBits::eFront : return "Front";
+ case CullModeFlagBits::eBack : return "Back";
+ case CullModeFlagBits::eFrontAndBack : return "FrontAndBack";
+ default: return "invalid";
+ }
+ }
+
+ enum class DebugReportFlagBitsEXT
+ {
+ eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+ eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case DebugReportFlagBitsEXT::eInformation : return "Information";
+ case DebugReportFlagBitsEXT::eWarning : return "Warning";
+ case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning";
+ case DebugReportFlagBitsEXT::eError : return "Error";
+ case DebugReportFlagBitsEXT::eDebug : return "Debug";
+ default: return "invalid";
+ }
+ }
+
enum class DebugReportObjectTypeEXT
{
eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
@@ -3329,6 +3945,119 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class DebugUtilsMessageSeverityFlagBitsEXT
+ {
+ eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+ eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+ eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
+ eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning";
+ case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error";
+ default: return "invalid";
+ }
+ }
+
+ enum class DebugUtilsMessageTypeFlagBitsEXT
+ {
+ eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+ eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+ ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General";
+ case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation";
+ case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance";
+ default: return "invalid";
+ }
+ }
+
+ enum class DependencyFlagBits
+ {
+ eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
+ eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+ eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+ eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR,
+ eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
+ {
+ switch ( value )
+ {
+ case DependencyFlagBits::eByRegion : return "ByRegion";
+ case DependencyFlagBits::eDeviceGroup : return "DeviceGroup";
+ case DependencyFlagBits::eViewLocal : return "ViewLocal";
+ default: return "invalid";
+ }
+ }
+
+ enum class DescriptorBindingFlagBits
+ {
+ eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
+ eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
+ ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
+ eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT
+ };
+ using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
+ {
+ switch ( value )
+ {
+ case DescriptorBindingFlagBits::eUpdateAfterBind : return "UpdateAfterBind";
+ case DescriptorBindingFlagBits::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending";
+ case DescriptorBindingFlagBits::ePartiallyBound : return "PartiallyBound";
+ case DescriptorBindingFlagBits::eVariableDescriptorCount : return "VariableDescriptorCount";
+ default: return "invalid";
+ }
+ }
+
+ enum class DescriptorPoolCreateFlagBits
+ {
+ eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+ eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
+ eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet";
+ case DescriptorPoolCreateFlagBits::eUpdateAfterBind : return "UpdateAfterBind";
+ default: return "invalid";
+ }
+ }
+
+ enum class DescriptorSetLayoutCreateFlagBits
+ {
+ eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
+ ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+ eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool : return "UpdateAfterBindPool";
+ case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR";
+ default: return "invalid";
+ }
+ }
+
enum class DescriptorType
{
eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
@@ -3370,9 +4099,9 @@ namespace VULKAN_HPP_NAMESPACE
enum class DescriptorUpdateTemplateType
{
eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
- ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR,
- eDescriptorSetKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR
+ ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
};
+ using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;
VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value )
{
@@ -3384,6 +4113,14 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class DeviceCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
+ {
+ return "(void)";
+ }
+
enum class DeviceEventTypeEXT
{
eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
@@ -3398,6 +4135,40 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class DeviceGroupPresentModeFlagBitsKHR
+ {
+ eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+ eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+ eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+ eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local";
+ case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote";
+ case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum";
+ case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice";
+ default: return "invalid";
+ }
+ }
+
+ enum class DeviceQueueCreateFlagBits
+ {
+ eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case DeviceQueueCreateFlagBits::eProtected : return "Protected";
+ default: return "invalid";
+ }
+ }
+
enum class DiscardRectangleModeEXT
{
eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
@@ -3428,6 +4199,26 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class DisplayPlaneAlphaFlagBitsKHR
+ {
+ eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+ eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+ ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+ ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque";
+ case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global";
+ case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel";
+ case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied";
+ default: return "invalid";
+ }
+ }
+
enum class DisplayPowerStateEXT
{
eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
@@ -3446,38 +4237,40 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class DriverIdKHR
+ enum class DriverId
{
- eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY_KHR,
- eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR,
- eMesaRadv = VK_DRIVER_ID_MESA_RADV_KHR,
- eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR,
- eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR,
- eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR,
- eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR,
- eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR,
- eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY_KHR,
- eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR,
- eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY_KHR,
- eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR
+ eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY,
+ eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE,
+ eMesaRadv = VK_DRIVER_ID_MESA_RADV,
+ eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
+ eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
+ eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
+ eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
+ eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
+ eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY,
+ eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
+ eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY,
+ eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+ eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR
};
+ using DriverIdKHR = DriverId;
- VULKAN_HPP_INLINE std::string to_string( DriverIdKHR value )
+ VULKAN_HPP_INLINE std::string to_string( DriverId value )
{
switch ( value )
{
- case DriverIdKHR::eAmdProprietary : return "AmdProprietary";
- case DriverIdKHR::eAmdOpenSource : return "AmdOpenSource";
- case DriverIdKHR::eMesaRadv : return "MesaRadv";
- case DriverIdKHR::eNvidiaProprietary : return "NvidiaProprietary";
- case DriverIdKHR::eIntelProprietaryWindows : return "IntelProprietaryWindows";
- case DriverIdKHR::eIntelOpenSourceMESA : return "IntelOpenSourceMESA";
- case DriverIdKHR::eImaginationProprietary : return "ImaginationProprietary";
- case DriverIdKHR::eQualcommProprietary : return "QualcommProprietary";
- case DriverIdKHR::eArmProprietary : return "ArmProprietary";
- case DriverIdKHR::eGoogleSwiftshader : return "GoogleSwiftshader";
- case DriverIdKHR::eGgpProprietary : return "GgpProprietary";
- case DriverIdKHR::eBroadcomProprietary : return "BroadcomProprietary";
+ case DriverId::eAmdProprietary : return "AmdProprietary";
+ case DriverId::eAmdOpenSource : return "AmdOpenSource";
+ case DriverId::eMesaRadv : return "MesaRadv";
+ case DriverId::eNvidiaProprietary : return "NvidiaProprietary";
+ case DriverId::eIntelProprietaryWindows : return "IntelProprietaryWindows";
+ case DriverId::eIntelOpenSourceMESA : return "IntelOpenSourceMESA";
+ case DriverId::eImaginationProprietary : return "ImaginationProprietary";
+ case DriverId::eQualcommProprietary : return "QualcommProprietary";
+ case DriverId::eArmProprietary : return "ArmProprietary";
+ case DriverId::eGoogleSwiftshader : return "GoogleSwiftshader";
+ case DriverId::eGgpProprietary : return "GgpProprietary";
+ case DriverId::eBroadcomProprietary : return "BroadcomProprietary";
default: return "invalid";
}
}
@@ -3526,6 +4319,205 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class ExternalFenceFeatureFlagBits
+ {
+ eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+ eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
+ };
+ using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalFenceFeatureFlagBits::eExportable : return "Exportable";
+ case ExternalFenceFeatureFlagBits::eImportable : return "Importable";
+ default: return "invalid";
+ }
+ }
+
+ enum class ExternalFenceHandleTypeFlagBits
+ {
+ eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
+ };
+ using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+ case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+ case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+ case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd";
+ default: return "invalid";
+ }
+ }
+
+ enum class ExternalMemoryFeatureFlagBits
+ {
+ eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+ eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+ eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
+ };
+ using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly";
+ case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable";
+ case ExternalMemoryFeatureFlagBits::eImportable : return "Importable";
+ default: return "invalid";
+ }
+ }
+
+ enum class ExternalMemoryFeatureFlagBitsNV
+ {
+ eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+ eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+ eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly";
+ case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable";
+ case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable";
+ default: return "invalid";
+ }
+ }
+
+ enum class ExternalMemoryHandleTypeFlagBits
+ {
+ eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+ eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+ eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+ eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+ eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
+ eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
+ eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+ eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT
+ };
+ using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+ case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+ case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture";
+ case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt";
+ case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap";
+ case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource";
+ case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT";
+ case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID";
+ case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT";
+ case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT";
+ default: return "invalid";
+ }
+ }
+
+ enum class ExternalMemoryHandleTypeFlagBitsNV
+ {
+ eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+ eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+ eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+ eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32";
+ case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+ case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image";
+ case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt";
+ default: return "invalid";
+ }
+ }
+
+ enum class ExternalSemaphoreFeatureFlagBits
+ {
+ eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+ eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
+ };
+ using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable";
+ case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable";
+ default: return "invalid";
+ }
+ }
+
+ enum class ExternalSemaphoreHandleTypeFlagBits
+ {
+ eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+ eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+ eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+ eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+ eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT
+ };
+ using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
+ case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
+ case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence";
+ case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd";
+ default: return "invalid";
+ }
+ }
+
+ enum class FenceCreateFlagBits
+ {
+ eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case FenceCreateFlagBits::eSignaled : return "Signaled";
+ default: return "invalid";
+ }
+ }
+
+ enum class FenceImportFlagBits
+ {
+ eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
+ };
+ using FenceImportFlagBitsKHR = FenceImportFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
+ {
+ switch ( value )
+ {
+ case FenceImportFlagBits::eTemporary : return "Temporary";
+ default: return "invalid";
+ }
+ }
+
enum class Filter
{
eNearest = VK_FILTER_NEAREST,
@@ -4073,6 +5065,94 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class FormatFeatureFlagBits
+ {
+ eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+ eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+ eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+ eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+ eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+ eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+ eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+ eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+ eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+ eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+ eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+ eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+ eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+ eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+ eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+ eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+ eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+ eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+ eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+ eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+ eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+ eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+ eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+ eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR,
+ eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+ eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
+ eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
+ eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+ eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+ eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
+ eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
+ eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
+ eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+ eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case FormatFeatureFlagBits::eSampledImage : return "SampledImage";
+ case FormatFeatureFlagBits::eStorageImage : return "StorageImage";
+ case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic";
+ case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
+ case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
+ case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic";
+ case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer";
+ case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment";
+ case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend";
+ case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
+ case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc";
+ case FormatFeatureFlagBits::eBlitDst : return "BlitDst";
+ case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear";
+ case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc";
+ case FormatFeatureFlagBits::eTransferDst : return "TransferDst";
+ case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit";
+ case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
+ case FormatFeatureFlagBits::eDisjoint : return "Disjoint";
+ case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples";
+ case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax";
+ case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG";
+ case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+ default: return "invalid";
+ }
+ }
+
+ enum class FramebufferCreateFlagBits
+ {
+ eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
+ eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case FramebufferCreateFlagBits::eImageless : return "Imageless";
+ default: return "invalid";
+ }
+ }
+
enum class FrontFace
{
eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
@@ -4111,6 +5191,42 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+ enum class GeometryFlagBitsNV
+ {
+ eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV,
+ eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case GeometryFlagBitsNV::eOpaque : return "Opaque";
+ case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
+ default: return "invalid";
+ }
+ }
+
+ enum class GeometryInstanceFlagBitsNV
+ {
+ eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+ eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV,
+ eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV,
+ eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsNV value )
+ {
+ switch ( value )
+ {
+ case GeometryInstanceFlagBitsNV::eTriangleCullDisable : return "TriangleCullDisable";
+ case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise";
+ case GeometryInstanceFlagBitsNV::eForceOpaque : return "ForceOpaque";
+ case GeometryInstanceFlagBitsNV::eForceNoOpaque : return "ForceNoOpaque";
+ default: return "invalid";
+ }
+ }
+
enum class GeometryTypeNV
{
eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV,
@@ -4127,6 +5243,91 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class ImageAspectFlagBits
+ {
+ eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+ eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+ eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+ eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
+ ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
+ ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
+ ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
+ eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
+ eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
+ eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
+ eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
+ ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
+ ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
+ ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
+ {
+ switch ( value )
+ {
+ case ImageAspectFlagBits::eColor : return "Color";
+ case ImageAspectFlagBits::eDepth : return "Depth";
+ case ImageAspectFlagBits::eStencil : return "Stencil";
+ case ImageAspectFlagBits::eMetadata : return "Metadata";
+ case ImageAspectFlagBits::ePlane0 : return "Plane0";
+ case ImageAspectFlagBits::ePlane1 : return "Plane1";
+ case ImageAspectFlagBits::ePlane2 : return "Plane2";
+ case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT";
+ case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT";
+ case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT";
+ case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT";
+ default: return "invalid";
+ }
+ }
+
+ enum class ImageCreateFlagBits
+ {
+ eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+ eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+ eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+ eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+ eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+ eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+ eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+ e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+ eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+ eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+ eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+ eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+ eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
+ eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
+ eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
+ eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+ e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+ eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
+ eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+ eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
+ eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case ImageCreateFlagBits::eSparseBinding : return "SparseBinding";
+ case ImageCreateFlagBits::eSparseResidency : return "SparseResidency";
+ case ImageCreateFlagBits::eSparseAliased : return "SparseAliased";
+ case ImageCreateFlagBits::eMutableFormat : return "MutableFormat";
+ case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible";
+ case ImageCreateFlagBits::eAlias : return "Alias";
+ case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
+ case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible";
+ case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible";
+ case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage";
+ case ImageCreateFlagBits::eProtected : return "Protected";
+ case ImageCreateFlagBits::eDisjoint : return "Disjoint";
+ case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV";
+ case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT";
+ case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+ default: return "invalid";
+ }
+ }
+
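Individual ImageCreateFlagBits combine into a vk::ImageCreateFlags mask through the generated operator|. A small sketch; the field names are real vk::ImageCreateInfo members, the values are illustrative:

    #include <vulkan/vulkan.hpp>

    vk::ImageCreateInfo makeCubeImageInfo()
    {
      vk::ImageCreateInfo info;
      info.flags       = vk::ImageCreateFlagBits::eCubeCompatible
                       | vk::ImageCreateFlagBits::eMutableFormat;
      info.imageType   = vk::ImageType::e2D;
      info.arrayLayers = 6;   // a cube image needs one layer per face
      return info;
    }
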
enum class ImageLayout
{
eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
@@ -4140,16 +5341,20 @@ namespace VULKAN_HPP_NAMESPACE
ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+ eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+ eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+ eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+ eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
+ eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+ eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
- eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR,
- eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
- eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR
+ eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR
};
VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
@@ -4167,14 +5372,14 @@ namespace VULKAN_HPP_NAMESPACE
case ImageLayout::ePreinitialized : return "Preinitialized";
case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal";
case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal";
+ case ImageLayout::eDepthAttachmentOptimal : return "DepthAttachmentOptimal";
+ case ImageLayout::eDepthReadOnlyOptimal : return "DepthReadOnlyOptimal";
+ case ImageLayout::eStencilAttachmentOptimal : return "StencilAttachmentOptimal";
+ case ImageLayout::eStencilReadOnlyOptimal : return "StencilReadOnlyOptimal";
case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR";
case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR";
case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV";
case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT";
- case ImageLayout::eDepthAttachmentOptimalKHR : return "DepthAttachmentOptimalKHR";
- case ImageLayout::eDepthReadOnlyOptimalKHR : return "DepthReadOnlyOptimalKHR";
- case ImageLayout::eStencilAttachmentOptimalKHR : return "StencilAttachmentOptimalKHR";
- case ImageLayout::eStencilReadOnlyOptimalKHR : return "StencilReadOnlyOptimalKHR";
default: return "invalid";
}
}
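This hunk promotes the depth-only and stencil-only layouts from VK_KHR_separate_depth_stencil_layouts to core, so they slot into barriers under their unsuffixed names; the KHR spellings remain as aliases. A sketch showing only the layout fields of an otherwise unpopulated barrier:

    vk::ImageMemoryBarrier barrier;
    barrier.oldLayout = vk::ImageLayout::eUndefined;
    barrier.newLayout = vk::ImageLayout::eDepthAttachmentOptimal;  // was eDepthAttachmentOptimalKHR
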
@@ -4215,6 +5420,52 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class ImageUsageFlagBits
+ {
+ eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+ eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+ eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+ eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+ eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+ eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+ eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+ eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
+ eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
+ {
+ switch ( value )
+ {
+ case ImageUsageFlagBits::eTransferSrc : return "TransferSrc";
+ case ImageUsageFlagBits::eTransferDst : return "TransferDst";
+ case ImageUsageFlagBits::eSampled : return "Sampled";
+ case ImageUsageFlagBits::eStorage : return "Storage";
+ case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment";
+ case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
+ case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment";
+ case ImageUsageFlagBits::eInputAttachment : return "InputAttachment";
+ case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
+ case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
+ default: return "invalid";
+ }
+ }
+
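Since a usage mask usually carries several bits, decoding it for a log means testing each bit and concatenating the per-bit names. A sketch, with the candidate list shortened for brevity:

    #include <vulkan/vulkan.hpp>
    #include <string>

    std::string describeUsage( vk::ImageUsageFlags usage )
    {
      std::string out;
      for ( auto bit : { vk::ImageUsageFlagBits::eTransferSrc,
                         vk::ImageUsageFlagBits::eSampled,
                         vk::ImageUsageFlagBits::eColorAttachment } )
      {
        if ( usage & bit )   // Flags has an explicit bool conversion
          out += vk::to_string( bit ) + " ";
      }
      return out;
    }
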
+ enum class ImageViewCreateFlagBits
+ {
+ eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT";
+ default: return "invalid";
+ }
+ }
+
enum class ImageViewType
{
e1D = VK_IMAGE_VIEW_TYPE_1D,
@@ -4261,6 +5512,26 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class IndirectCommandsLayoutUsageFlagBitsNVX
+ {
+ eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
+ eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
+ eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
+ eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNVX value )
+ {
+ switch ( value )
+ {
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences : return "UnorderedSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences : return "SparseSequences";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions : return "EmptyExecutions";
+ case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences : return "IndexedSequences";
+ default: return "invalid";
+ }
+ }
+
enum class IndirectCommandsTokenTypeNVX
{
ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX,
@@ -4289,6 +5560,14 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class InstanceCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits )
+ {
+ return "(void)";
+ }
+
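InstanceCreateFlagBits is one of several placeholder enums added here for flags fields the spec still reserves; no bit exists, hence the unconditional "(void)". A two-line sketch of what that means in practice:

    #include <vulkan/vulkan.hpp>
    #include <cassert>

    int main()
    {
      vk::InstanceCreateInfo info;   // the flags member defaults to an empty mask
      assert( !info.flags );         // Flags::operator! reports "no bits set"
    }
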
enum class InternalAllocationType
{
eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
@@ -4367,6 +5646,42 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class MemoryAllocateFlagBits
+ {
+ eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+ eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
+ eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT
+ };
+ using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
+ {
+ switch ( value )
+ {
+ case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask";
+ case MemoryAllocateFlagBits::eDeviceAddress : return "DeviceAddress";
+ case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
+ default: return "invalid";
+ }
+ }
+
+ enum class MemoryHeapFlagBits
+ {
+ eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+ eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+ eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
+ {
+ switch ( value )
+ {
+ case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal";
+ case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance";
+ default: return "invalid";
+ }
+ }
+
enum class MemoryOverallocationBehaviorAMD
{
eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
@@ -4385,6 +5700,34 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class MemoryPropertyFlagBits
+ {
+ eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+ eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+ eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+ eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+ eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
+ eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
+ eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
+ eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
+ {
+ switch ( value )
+ {
+ case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal";
+ case MemoryPropertyFlagBits::eHostVisible : return "HostVisible";
+ case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent";
+ case MemoryPropertyFlagBits::eHostCached : return "HostCached";
+ case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated";
+ case MemoryPropertyFlagBits::eProtected : return "Protected";
+ case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD";
+ case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD";
+ default: return "invalid";
+ }
+ }
+
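MemoryPropertyFlagBits drives the canonical memory-type search: mask the allowed type bits from vk::MemoryRequirements against the properties each type advertises. A sketch, assuming a valid vk::PhysicalDevice:

    #include <vulkan/vulkan.hpp>
    #include <stdexcept>

    uint32_t findMemoryType( vk::PhysicalDevice gpu, uint32_t typeBits,
                             vk::MemoryPropertyFlags wanted )
    {
      vk::PhysicalDeviceMemoryProperties props = gpu.getMemoryProperties();
      for ( uint32_t i = 0; i < props.memoryTypeCount; ++i )
      {
        if ( ( typeBits & ( 1u << i ) ) &&
             ( props.memoryTypes[i].propertyFlags & wanted ) == wanted )
          return i;   // first type that is both allowed and has every wanted bit
      }
      throw std::runtime_error( "no suitable memory type" );
    }
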
enum class ObjectEntryTypeNVX
{
eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX,
@@ -4407,6 +5750,22 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class ObjectEntryUsageFlagBitsNVX
+ {
+ eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
+ eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagBitsNVX value )
+ {
+ switch ( value )
+ {
+ case ObjectEntryUsageFlagBitsNVX::eGraphics : return "Graphics";
+ case ObjectEntryUsageFlagBitsNVX::eCompute : return "Compute";
+ default: return "invalid";
+ }
+ }
+
enum class ObjectType
{
eUnknown = VK_OBJECT_TYPE_UNKNOWN,
@@ -4499,6 +5858,27 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class PeerMemoryFeatureFlagBits
+ {
+ eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+ eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+ eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+ eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT
+ };
+ using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc";
+ case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst";
+ case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc";
+ case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst";
+ default: return "invalid";
+ }
+ }
+
enum class PerformanceConfigurationTypeINTEL
{
eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
@@ -4513,6 +5893,101 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class PerformanceCounterDescriptionFlagBitsKHR
+ {
+ ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR,
+ eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting : return "PerformanceImpacting";
+ case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted : return "ConcurrentlyImpacted";
+ default: return "invalid";
+ }
+ }
+
+ enum class PerformanceCounterScopeKHR
+ {
+ eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+ eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
+ eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+ eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
+ eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR,
+ eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
+ {
+ switch ( value )
+ {
+ case PerformanceCounterScopeKHR::eCommandBuffer : return "CommandBuffer";
+ case PerformanceCounterScopeKHR::eRenderPass : return "RenderPass";
+ case PerformanceCounterScopeKHR::eCommand : return "Command";
+ default: return "invalid";
+ }
+ }
+
+ enum class PerformanceCounterStorageKHR
+ {
+ eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
+ eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR,
+ eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR,
+ eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR,
+ eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR,
+ eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
+ {
+ switch ( value )
+ {
+ case PerformanceCounterStorageKHR::eInt32 : return "Int32";
+ case PerformanceCounterStorageKHR::eInt64 : return "Int64";
+ case PerformanceCounterStorageKHR::eUint32 : return "Uint32";
+ case PerformanceCounterStorageKHR::eUint64 : return "Uint64";
+ case PerformanceCounterStorageKHR::eFloat32 : return "Float32";
+ case PerformanceCounterStorageKHR::eFloat64 : return "Float64";
+ default: return "invalid";
+ }
+ }
+
+ enum class PerformanceCounterUnitKHR
+ {
+ eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
+ ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR,
+ eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR,
+ eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR,
+ eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR,
+ eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR,
+ eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR,
+ eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR,
+ eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR,
+ eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR,
+ eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
+ {
+ switch ( value )
+ {
+ case PerformanceCounterUnitKHR::eGeneric : return "Generic";
+ case PerformanceCounterUnitKHR::ePercentage : return "Percentage";
+ case PerformanceCounterUnitKHR::eNanoseconds : return "Nanoseconds";
+ case PerformanceCounterUnitKHR::eBytes : return "Bytes";
+ case PerformanceCounterUnitKHR::eBytesPerSecond : return "BytesPerSecond";
+ case PerformanceCounterUnitKHR::eKelvin : return "Kelvin";
+ case PerformanceCounterUnitKHR::eWatts : return "Watts";
+ case PerformanceCounterUnitKHR::eVolts : return "Volts";
+ case PerformanceCounterUnitKHR::eAmps : return "Amps";
+ case PerformanceCounterUnitKHR::eHertz : return "Hertz";
+ case PerformanceCounterUnitKHR::eCycles : return "Cycles";
+ default: return "invalid";
+ }
+ }
+
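The four PerformanceCounter* enums describe counters reported by VK_KHR_performance_query; unit, scope and storage are fields of vk::PerformanceCounterKHR, so a listing tool can stringify them directly. A sketch:

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    void printCounter( vk::PerformanceCounterKHR const & c )
    {
      std::cout << vk::to_string( c.unit )  << " | "
                << vk::to_string( c.scope ) << " | "
                << vk::to_string( c.storage ) << '\n';
    }
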
enum class PerformanceOverrideTypeINTEL
{
eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
@@ -4607,6 +6082,14 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class PipelineCacheCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits )
+ {
+ return "(void)";
+ }
+
enum class PipelineCacheHeaderVersion
{
eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
@@ -4621,6 +6104,86 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class PipelineColorBlendStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineCompilerControlFlagBitsAMD
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineCreateFlagBits
+ {
+ eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+ eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+ eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+ eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+ eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+ eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
+ eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
+ eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
+ eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
+ eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization";
+ case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives";
+ case PipelineCreateFlagBits::eDerivative : return "Derivative";
+ case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex";
+ case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase";
+ case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV";
+ case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR";
+ case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR";
+ default: return "invalid";
+ }
+ }
+
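eAllowDerivatives and eDerivative pair up for derivative pipelines: the base opts in, the child points back at it. A sketch showing only the relevant fields:

    vk::GraphicsPipelineCreateInfo base;
    base.flags = vk::PipelineCreateFlagBits::eAllowDerivatives;

    vk::GraphicsPipelineCreateInfo child;
    child.flags             = vk::PipelineCreateFlagBits::eDerivative;
    child.basePipelineIndex = 0;   // index of `base` in the same createGraphicsPipelines call
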
+ enum class PipelineCreationFeedbackFlagBitsEXT
+ {
+ eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT,
+ eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT,
+ eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid";
+ case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit";
+ case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration";
+ default: return "invalid";
+ }
+ }
+
+ enum class PipelineDepthStencilStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineDynamicStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
enum class PipelineExecutableStatisticFormatKHR
{
eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
@@ -4641,13 +6204,148 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class PipelineInputAssemblyStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineLayoutCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineMultisampleStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineRasterizationStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineShaderStageCreateFlagBits
+ {
+ eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
+ eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT";
+ case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT";
+ default: return "invalid";
+ }
+ }
+
+ enum class PipelineStageFlagBits
+ {
+ eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+ eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+ eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+ eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+ eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+ eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+ eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+ eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+ eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+ eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+ eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+ eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+ eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+ eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+ eHost = VK_PIPELINE_STAGE_HOST_BIT,
+ eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+ eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+ eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+ eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+ eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
+ eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+ eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+ eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+ eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
+ eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+ eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
+ {
+ switch ( value )
+ {
+ case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe";
+ case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect";
+ case PipelineStageFlagBits::eVertexInput : return "VertexInput";
+ case PipelineStageFlagBits::eVertexShader : return "VertexShader";
+ case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader";
+ case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader";
+ case PipelineStageFlagBits::eGeometryShader : return "GeometryShader";
+ case PipelineStageFlagBits::eFragmentShader : return "FragmentShader";
+ case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests";
+ case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests";
+ case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput";
+ case PipelineStageFlagBits::eComputeShader : return "ComputeShader";
+ case PipelineStageFlagBits::eTransfer : return "Transfer";
+ case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe";
+ case PipelineStageFlagBits::eHost : return "Host";
+ case PipelineStageFlagBits::eAllGraphics : return "AllGraphics";
+ case PipelineStageFlagBits::eAllCommands : return "AllCommands";
+ case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT";
+ case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
+ case PipelineStageFlagBits::eCommandProcessNVX : return "CommandProcessNVX";
+ case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
+ case PipelineStageFlagBits::eRayTracingShaderNV : return "RayTracingShaderNV";
+ case PipelineStageFlagBits::eAccelerationStructureBuildNV : return "AccelerationStructureBuildNV";
+ case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV";
+ case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
+ case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
+ default: return "invalid";
+ }
+ }
+
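PipelineStageFlagBits feed the two stage masks of a barrier; the classic texture-upload hand-off looks roughly like this (cmd, a vk::CommandBuffer, and barrier, a vk::ImageMemoryBarrier, are assumed to exist):

    // Make a transfer write visible to fragment-shader reads.
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                         vk::PipelineStageFlagBits::eFragmentShader,
                         vk::DependencyFlags(),
                         nullptr,      // no global memory barriers
                         nullptr,      // no buffer barriers
                         barrier );    // one image barrier
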
+ enum class PipelineTessellationStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineVertexInputStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class PipelineViewportStateCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
+ {
+ return "(void)";
+ }
+
enum class PointClippingBehavior
{
eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
- eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
- eAllClipPlanesKHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR,
- eUserClipPlanesOnlyKHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR
+ eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
};
+ using PointClippingBehaviorKHR = PointClippingBehavior;
VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
{
@@ -4737,6 +6435,62 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class QueryControlFlagBits
+ {
+ ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
+ {
+ switch ( value )
+ {
+ case QueryControlFlagBits::ePrecise : return "Precise";
+ default: return "invalid";
+ }
+ }
+
+ enum class QueryPipelineStatisticFlagBits
+ {
+ eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+ eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+ eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+ eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+ eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+ eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+ eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+ eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+ eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+ eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+ eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
+ {
+ switch ( value )
+ {
+ case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices";
+ case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives";
+ case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives";
+ case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations";
+ case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives";
+ case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches";
+ case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations";
+ case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations";
+ default: return "invalid";
+ }
+ }
+
+ enum class QueryPoolCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
+ {
+ return "(void)";
+ }
+
enum class QueryPoolSamplingModeINTEL
{
eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
@@ -4751,12 +6505,33 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class QueryResultFlagBits
+ {
+ e64 = VK_QUERY_RESULT_64_BIT,
+ eWait = VK_QUERY_RESULT_WAIT_BIT,
+ eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+ ePartial = VK_QUERY_RESULT_PARTIAL_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
+ {
+ switch ( value )
+ {
+ case QueryResultFlagBits::e64 : return "64";
+ case QueryResultFlagBits::eWait : return "Wait";
+ case QueryResultFlagBits::eWithAvailability : return "WithAvailability";
+ case QueryResultFlagBits::ePartial : return "Partial";
+ default: return "invalid";
+ }
+ }
+
enum class QueryType
{
eOcclusion = VK_QUERY_TYPE_OCCLUSION,
ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
+ ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL
};
@@ -4769,12 +6544,35 @@ namespace VULKAN_HPP_NAMESPACE
case QueryType::ePipelineStatistics : return "PipelineStatistics";
case QueryType::eTimestamp : return "Timestamp";
case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT";
+ case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR";
case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV";
case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL";
default: return "invalid";
}
}
+ enum class QueueFlagBits
+ {
+ eGraphics = VK_QUEUE_GRAPHICS_BIT,
+ eCompute = VK_QUEUE_COMPUTE_BIT,
+ eTransfer = VK_QUEUE_TRANSFER_BIT,
+ eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
+ eProtected = VK_QUEUE_PROTECTED_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
+ {
+ switch ( value )
+ {
+ case QueueFlagBits::eGraphics : return "Graphics";
+ case QueueFlagBits::eCompute : return "Compute";
+ case QueueFlagBits::eTransfer : return "Transfer";
+ case QueueFlagBits::eSparseBinding : return "SparseBinding";
+ case QueueFlagBits::eProtected : return "Protected";
+ default: return "invalid";
+ }
+ }
+
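QueueFlagBits is what queue-family selection tests against; note that requiring several capabilities means comparing the masked value back to the mask, since & alone answers "any bit", not "all bits". A sketch:

    #include <vulkan/vulkan.hpp>

    int pickGraphicsComputeFamily( vk::PhysicalDevice gpu )
    {
      const vk::QueueFlags wanted =
        vk::QueueFlagBits::eGraphics | vk::QueueFlagBits::eCompute;
      auto families = gpu.getQueueFamilyProperties();
      for ( uint32_t i = 0; i < families.size(); ++i )
        if ( ( families[i].queueFlags & wanted ) == wanted )
          return static_cast<int>( i );
      return -1;   // no family offers both capabilities
    }
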
enum class QueueGlobalPriorityEXT
{
eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
@@ -4829,6 +6627,37 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class RenderPassCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class ResolveModeFlagBits
+ {
+ eNone = VK_RESOLVE_MODE_NONE,
+ eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
+ eAverage = VK_RESOLVE_MODE_AVERAGE_BIT,
+ eMin = VK_RESOLVE_MODE_MIN_BIT,
+ eMax = VK_RESOLVE_MODE_MAX_BIT
+ };
+ using ResolveModeFlagBitsKHR = ResolveModeFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
+ {
+ switch ( value )
+ {
+ case ResolveModeFlagBits::eNone : return "None";
+ case ResolveModeFlagBits::eSampleZero : return "SampleZero";
+ case ResolveModeFlagBits::eAverage : return "Average";
+ case ResolveModeFlagBits::eMin : return "Min";
+ case ResolveModeFlagBits::eMax : return "Max";
+ default: return "invalid";
+ }
+ }
+
enum class Result
{
eSuccess = VK_SUCCESS,
@@ -4849,8 +6678,11 @@ namespace VULKAN_HPP_NAMESPACE
eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+ eErrorUnknown = VK_ERROR_UNKNOWN,
eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+ eErrorFragmentation = VK_ERROR_FRAGMENTATION,
+ eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
@@ -4859,12 +6691,13 @@ namespace VULKAN_HPP_NAMESPACE
eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
- eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
- eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
- eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR
+ eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
+ eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
+ eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
+ eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR
};
VULKAN_HPP_INLINE std::string to_string( Result value )
@@ -4889,8 +6722,11 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorTooManyObjects : return "ErrorTooManyObjects";
case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported";
case Result::eErrorFragmentedPool : return "ErrorFragmentedPool";
+ case Result::eErrorUnknown : return "ErrorUnknown";
case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory";
case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle";
+ case Result::eErrorFragmentation : return "ErrorFragmentation";
+ case Result::eErrorInvalidOpaqueCaptureAddress : return "ErrorInvalidOpaqueCaptureAddress";
case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR";
case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR";
case Result::eSuboptimalKHR : return "SuboptimalKHR";
@@ -4899,14 +6735,38 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT";
case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV";
case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
- case Result::eErrorFragmentationEXT : return "ErrorFragmentationEXT";
case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT";
- case Result::eErrorInvalidDeviceAddressEXT : return "ErrorInvalidDeviceAddressEXT";
case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT";
default: return "invalid";
}
}
+ enum class SampleCountFlagBits
+ {
+ e1 = VK_SAMPLE_COUNT_1_BIT,
+ e2 = VK_SAMPLE_COUNT_2_BIT,
+ e4 = VK_SAMPLE_COUNT_4_BIT,
+ e8 = VK_SAMPLE_COUNT_8_BIT,
+ e16 = VK_SAMPLE_COUNT_16_BIT,
+ e32 = VK_SAMPLE_COUNT_32_BIT,
+ e64 = VK_SAMPLE_COUNT_64_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
+ {
+ switch ( value )
+ {
+ case SampleCountFlagBits::e1 : return "1";
+ case SampleCountFlagBits::e2 : return "2";
+ case SampleCountFlagBits::e4 : return "4";
+ case SampleCountFlagBits::e8 : return "8";
+ case SampleCountFlagBits::e16 : return "16";
+ case SampleCountFlagBits::e32 : return "32";
+ case SampleCountFlagBits::e64 : return "64";
+ default: return "invalid";
+ }
+ }
+
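A common consumer of SampleCountFlagBits walks the device limits from the highest count down; a sketch over vk::PhysicalDeviceLimits:

    vk::SampleCountFlagBits maxUsableSamples( vk::PhysicalDeviceLimits const & limits )
    {
      vk::SampleCountFlags counts = limits.framebufferColorSampleCounts
                                  & limits.framebufferDepthSampleCounts;
      for ( auto bit : { vk::SampleCountFlagBits::e64, vk::SampleCountFlagBits::e32,
                         vk::SampleCountFlagBits::e16, vk::SampleCountFlagBits::e8,
                         vk::SampleCountFlagBits::e4,  vk::SampleCountFlagBits::e2 } )
        if ( counts & bit )
          return bit;
      return vk::SampleCountFlagBits::e1;
    }
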
enum class SamplerAddressMode
{
eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
@@ -4930,6 +6790,22 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class SamplerCreateFlagBits
+ {
+ eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
+ eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
+ {
+ switch ( value )
+ {
+ case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+ case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT";
+ default: return "invalid";
+ }
+ }
+
enum class SamplerMipmapMode
{
eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
@@ -4946,20 +6822,21 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SamplerReductionModeEXT
+ enum class SamplerReductionMode
{
- eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
- eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT,
- eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT
+ eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+ eMin = VK_SAMPLER_REDUCTION_MODE_MIN,
+ eMax = VK_SAMPLER_REDUCTION_MODE_MAX
};
+ using SamplerReductionModeEXT = SamplerReductionMode;
- VULKAN_HPP_INLINE std::string to_string( SamplerReductionModeEXT value )
+ VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
{
switch ( value )
{
- case SamplerReductionModeEXT::eWeightedAverage : return "WeightedAverage";
- case SamplerReductionModeEXT::eMin : return "Min";
- case SamplerReductionModeEXT::eMax : return "Max";
+ case SamplerReductionMode::eWeightedAverage : return "WeightedAverage";
+ case SamplerReductionMode::eMin : return "Min";
+ case SamplerReductionMode::eMax : return "Max";
default: return "invalid";
}
}
@@ -4970,13 +6847,9 @@ namespace VULKAN_HPP_NAMESPACE
eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
- eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
- eRgbIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR,
- eYcbcrIdentityKHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR,
- eYcbcr709KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR,
- eYcbcr601KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR,
- eYcbcr2020KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR
+ eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
};
+ using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
{
@@ -4994,10 +6867,9 @@ namespace VULKAN_HPP_NAMESPACE
enum class SamplerYcbcrRange
{
eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
- eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
- eItuFullKHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR,
- eItuNarrowKHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR
+ eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
};
+ using SamplerYcbcrRangeKHR = SamplerYcbcrRange;
VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
{
@@ -5029,36 +6901,84 @@ namespace VULKAN_HPP_NAMESPACE
}
}
- enum class SemaphoreTypeKHR
+ enum class SemaphoreCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class SemaphoreImportFlagBits
+ {
+ eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
+ };
+ using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
+ {
+ switch ( value )
+ {
+ case SemaphoreImportFlagBits::eTemporary : return "Temporary";
+ default: return "invalid";
+ }
+ }
+
+ enum class SemaphoreType
{
- eBinary = VK_SEMAPHORE_TYPE_BINARY_KHR,
- eTimeline = VK_SEMAPHORE_TYPE_TIMELINE_KHR
+ eBinary = VK_SEMAPHORE_TYPE_BINARY,
+ eTimeline = VK_SEMAPHORE_TYPE_TIMELINE
};
+ using SemaphoreTypeKHR = SemaphoreType;
- VULKAN_HPP_INLINE std::string to_string( SemaphoreTypeKHR value )
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
{
switch ( value )
{
- case SemaphoreTypeKHR::eBinary : return "Binary";
- case SemaphoreTypeKHR::eTimeline : return "Timeline";
+ case SemaphoreType::eBinary : return "Binary";
+ case SemaphoreType::eTimeline : return "Timeline";
default: return "invalid";
}
}
- enum class ShaderFloatControlsIndependenceKHR
+ enum class SemaphoreWaitFlagBits
+ {
+ eAny = VK_SEMAPHORE_WAIT_ANY_BIT
+ };
+ using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;
+
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
+ {
+ switch ( value )
+ {
+ case SemaphoreWaitFlagBits::eAny : return "Any";
+ default: return "invalid";
+ }
+ }
+
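SemaphoreType, SemaphoreWaitFlagBits and friends are the timeline-semaphore machinery promoted to core in Vulkan 1.2; the KHR aliases keep existing code compiling. A host-side wait sketch, with device (vk::Device), sem (vk::Semaphore) and value (uint64_t) assumed:

    vk::SemaphoreWaitInfo waitInfo;
    waitInfo.flags          = vk::SemaphoreWaitFlagBits::eAny;  // any one satisfied is enough
    waitInfo.semaphoreCount = 1;
    waitInfo.pSemaphores    = &sem;
    waitInfo.pValues        = &value;   // counter payload to wait for
    vk::Result r = device.waitSemaphores( waitInfo, UINT64_MAX );
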
+ enum class ShaderCorePropertiesFlagBitsAMD
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
+ {
+ return "(void)";
+ }
+
+ enum class ShaderFloatControlsIndependence
{
- e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR,
- eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR,
- eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR
+ e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
+ eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
+ eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE
};
+ using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;
- VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependenceKHR value )
+ VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
{
switch ( value )
{
- case ShaderFloatControlsIndependenceKHR::e32BitOnly : return "32BitOnly";
- case ShaderFloatControlsIndependenceKHR::eAll : return "All";
- case ShaderFloatControlsIndependenceKHR::eNone : return "None";
+ case ShaderFloatControlsIndependence::e32BitOnly : return "32BitOnly";
+ case ShaderFloatControlsIndependence::eAll : return "All";
+ case ShaderFloatControlsIndependence::eNone : return "None";
default: return "invalid";
}
}
@@ -5081,6 +7001,58 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class ShaderModuleCreateFlagBits
+ {};
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
+ {
+ return "(void)";
+ }
+
+ enum class ShaderStageFlagBits
+ {
+ eVertex = VK_SHADER_STAGE_VERTEX_BIT,
+ eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+ eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+ eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
+ eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
+ eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
+ eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
+ eAll = VK_SHADER_STAGE_ALL,
+ eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
+ eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
+ eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
+ eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
+ eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
+ eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
+ eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
+ eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
+ {
+ switch ( value )
+ {
+ case ShaderStageFlagBits::eVertex : return "Vertex";
+ case ShaderStageFlagBits::eTessellationControl : return "TessellationControl";
+ case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation";
+ case ShaderStageFlagBits::eGeometry : return "Geometry";
+ case ShaderStageFlagBits::eFragment : return "Fragment";
+ case ShaderStageFlagBits::eCompute : return "Compute";
+ case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
+ case ShaderStageFlagBits::eAll : return "All";
+ case ShaderStageFlagBits::eRaygenNV : return "RaygenNV";
+ case ShaderStageFlagBits::eAnyHitNV : return "AnyHitNV";
+ case ShaderStageFlagBits::eClosestHitNV : return "ClosestHitNV";
+ case ShaderStageFlagBits::eMissNV : return "MissNV";
+ case ShaderStageFlagBits::eIntersectionNV : return "IntersectionNV";
+ case ShaderStageFlagBits::eCallableNV : return "CallableNV";
+ case ShaderStageFlagBits::eTaskNV : return "TaskNV";
+ case ShaderStageFlagBits::eMeshNV : return "MeshNV";
+ default: return "invalid";
+ }
+ }
+
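ShaderStageFlagBits appears both as a single stage in pipeline shader-stage create infos and as a mask in layouts (push-constant ranges, descriptor bindings). The single-stage case, with vertModule an assumed, already created vk::ShaderModule:

    vk::PipelineShaderStageCreateInfo stage;
    stage.stage  = vk::ShaderStageFlagBits::eVertex;
    stage.module = vertModule;
    stage.pName  = "main";   // entry point inside the SPIR-V module
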
enum class ShadingRatePaletteEntryNV
{
eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
@@ -5133,6 +7105,57 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class SparseImageFormatFlagBits
+ {
+ eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+ eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+ eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
+ {
+ switch ( value )
+ {
+ case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail";
+ case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize";
+ case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize";
+ default: return "invalid";
+ }
+ }
+
+ enum class SparseMemoryBindFlagBits
+ {
+ eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
+ {
+ switch ( value )
+ {
+ case SparseMemoryBindFlagBits::eMetadata : return "Metadata";
+ default: return "invalid";
+ }
+ }
+
+ enum class StencilFaceFlagBits
+ {
+ eFront = VK_STENCIL_FACE_FRONT_BIT,
+ eBack = VK_STENCIL_FACE_BACK_BIT,
+ eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+ eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
+ {
+ switch ( value )
+ {
+ case StencilFaceFlagBits::eFront : return "Front";
+ case StencilFaceFlagBits::eBack : return "Back";
+ case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack";
+ default: return "invalid";
+ }
+ }
+
enum class StencilOp
{
eKeep = VK_STENCIL_OP_KEEP,
@@ -5277,6 +7300,56 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+ ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
+ ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
+ ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
+ ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
+ eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
+ eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
+ eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
+ eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
+ eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
+ eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
+ eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
+ eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
+ ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+ ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+ ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+ ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+ ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+ eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+ ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+ ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+ eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+ eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+ ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+ eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+ ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+ eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+ ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
+ eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
+ ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+ ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
+ eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
+ eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
+ eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
+ ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+ ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+ ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+ eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+ eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
+ ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
+ ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+ ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+ eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+ eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+ eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+ eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+ ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+ eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+ eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+ eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+ eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
@@ -5336,7 +7409,6 @@ namespace VULKAN_HPP_NAMESPACE
eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
- ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
@@ -5360,23 +7432,19 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
- ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
- eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
- eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
- eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
- eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
- eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
- eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
- eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
- eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
- eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
- eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
+ ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR,
+ ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR,
+ eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR,
+ ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR,
+ eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
+ ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR,
+ ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR,
ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
@@ -5398,8 +7466,6 @@ namespace VULKAN_HPP_NAMESPACE
eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
- ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
- eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
@@ -5409,7 +7475,6 @@ namespace VULKAN_HPP_NAMESPACE
ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
- eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
@@ -5425,11 +7490,6 @@ namespace VULKAN_HPP_NAMESPACE
eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
- eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
- ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
- ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
- eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
- eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
@@ -5450,12 +7510,9 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
- ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
- ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
- ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
@@ -5466,10 +7523,6 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
- ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
- ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
- ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
- eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
@@ -5479,12 +7532,6 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
- ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
- ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
- eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
- eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
- eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
- eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
@@ -5492,7 +7539,6 @@ namespace VULKAN_HPP_NAMESPACE
ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
- ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
@@ -5501,7 +7547,6 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
- ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
@@ -5512,13 +7557,9 @@ namespace VULKAN_HPP_NAMESPACE
eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
- ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
- eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
- eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
- eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
- eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
+ ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
@@ -5528,7 +7569,6 @@ namespace VULKAN_HPP_NAMESPACE
eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
- ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
@@ -5536,7 +7576,6 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
- ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
@@ -5582,9 +7621,21 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
+ ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+ ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+ eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
+ eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
+ eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
+ eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+ eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+ eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+ eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+ eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
+ eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+ eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
@@ -5596,11 +7647,14 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
+ ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+ eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+ eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
@@ -5609,9 +7663,41 @@ namespace VULKAN_HPP_NAMESPACE
eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
+ eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+ ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
+ ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
+ eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+ eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
- ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT
+ ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
+ ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
+ ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+ ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+ ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
+ ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
+ eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+ ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
+ ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
+ eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
+ eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
+ eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
+ eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
+ ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+ ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
+ ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
+ eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
+ eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
+ ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
+ eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
+ eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
+ ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
+ ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
+ eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+ eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
+ eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
+ eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
+ ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT
};
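The enumerators shuffled above are the ones Vulkan 1.2 promoted to core: the suffixed KHR/EXT names move to the tail of the enum but keep their old VK_STRUCTURE_TYPE_* macros, which the C headers in turn alias to the core values. A minimal sketch of what that preserves (assuming the default vk namespace for VULKAN_HPP_NAMESPACE):

    #include <vulkan/vulkan.hpp>

    // Hedged sketch: the suffixed enumerator and its promoted core counterpart
    // are expected to carry the same underlying VkStructureType value.
    static_assert( static_cast<int>( vk::StructureType::eAttachmentDescription2KHR ) ==
                   static_cast<int>( vk::StructureType::eAttachmentDescription2 ),
                   "promoted alias" );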
VULKAN_HPP_INLINE std::string to_string( StructureType value )
@@ -5732,6 +7818,56 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties";
case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport";
case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures";
+ case StructureType::ePhysicalDeviceVulkan11Features : return "PhysicalDeviceVulkan11Features";
+ case StructureType::ePhysicalDeviceVulkan11Properties : return "PhysicalDeviceVulkan11Properties";
+ case StructureType::ePhysicalDeviceVulkan12Features : return "PhysicalDeviceVulkan12Features";
+ case StructureType::ePhysicalDeviceVulkan12Properties : return "PhysicalDeviceVulkan12Properties";
+ case StructureType::eImageFormatListCreateInfo : return "ImageFormatListCreateInfo";
+ case StructureType::eAttachmentDescription2 : return "AttachmentDescription2";
+ case StructureType::eAttachmentReference2 : return "AttachmentReference2";
+ case StructureType::eSubpassDescription2 : return "SubpassDescription2";
+ case StructureType::eSubpassDependency2 : return "SubpassDependency2";
+ case StructureType::eRenderPassCreateInfo2 : return "RenderPassCreateInfo2";
+ case StructureType::eSubpassBeginInfo : return "SubpassBeginInfo";
+ case StructureType::eSubpassEndInfo : return "SubpassEndInfo";
+ case StructureType::ePhysicalDevice8BitStorageFeatures : return "PhysicalDevice8BitStorageFeatures";
+ case StructureType::ePhysicalDeviceDriverProperties : return "PhysicalDeviceDriverProperties";
+ case StructureType::ePhysicalDeviceShaderAtomicInt64Features : return "PhysicalDeviceShaderAtomicInt64Features";
+ case StructureType::ePhysicalDeviceShaderFloat16Int8Features : return "PhysicalDeviceShaderFloat16Int8Features";
+ case StructureType::ePhysicalDeviceFloatControlsProperties : return "PhysicalDeviceFloatControlsProperties";
+ case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo : return "DescriptorSetLayoutBindingFlagsCreateInfo";
+ case StructureType::ePhysicalDeviceDescriptorIndexingFeatures : return "PhysicalDeviceDescriptorIndexingFeatures";
+ case StructureType::ePhysicalDeviceDescriptorIndexingProperties : return "PhysicalDeviceDescriptorIndexingProperties";
+ case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo : return "DescriptorSetVariableDescriptorCountAllocateInfo";
+ case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport : return "DescriptorSetVariableDescriptorCountLayoutSupport";
+ case StructureType::ePhysicalDeviceDepthStencilResolveProperties : return "PhysicalDeviceDepthStencilResolveProperties";
+ case StructureType::eSubpassDescriptionDepthStencilResolve : return "SubpassDescriptionDepthStencilResolve";
+ case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures : return "PhysicalDeviceScalarBlockLayoutFeatures";
+ case StructureType::eImageStencilUsageCreateInfo : return "ImageStencilUsageCreateInfo";
+ case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties : return "PhysicalDeviceSamplerFilterMinmaxProperties";
+ case StructureType::eSamplerReductionModeCreateInfo : return "SamplerReductionModeCreateInfo";
+ case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures : return "PhysicalDeviceVulkanMemoryModelFeatures";
+ case StructureType::ePhysicalDeviceImagelessFramebufferFeatures : return "PhysicalDeviceImagelessFramebufferFeatures";
+ case StructureType::eFramebufferAttachmentsCreateInfo : return "FramebufferAttachmentsCreateInfo";
+ case StructureType::eFramebufferAttachmentImageInfo : return "FramebufferAttachmentImageInfo";
+ case StructureType::eRenderPassAttachmentBeginInfo : return "RenderPassAttachmentBeginInfo";
+ case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures : return "PhysicalDeviceUniformBufferStandardLayoutFeatures";
+ case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures : return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures";
+ case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures : return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures";
+ case StructureType::eAttachmentReferenceStencilLayout : return "AttachmentReferenceStencilLayout";
+ case StructureType::eAttachmentDescriptionStencilLayout : return "AttachmentDescriptionStencilLayout";
+ case StructureType::ePhysicalDeviceHostQueryResetFeatures : return "PhysicalDeviceHostQueryResetFeatures";
+ case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures : return "PhysicalDeviceTimelineSemaphoreFeatures";
+ case StructureType::ePhysicalDeviceTimelineSemaphoreProperties : return "PhysicalDeviceTimelineSemaphoreProperties";
+ case StructureType::eSemaphoreTypeCreateInfo : return "SemaphoreTypeCreateInfo";
+ case StructureType::eTimelineSemaphoreSubmitInfo : return "TimelineSemaphoreSubmitInfo";
+ case StructureType::eSemaphoreWaitInfo : return "SemaphoreWaitInfo";
+ case StructureType::eSemaphoreSignalInfo : return "SemaphoreSignalInfo";
+ case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures : return "PhysicalDeviceBufferDeviceAddressFeatures";
+ case StructureType::eBufferDeviceAddressInfo : return "BufferDeviceAddressInfo";
+ case StructureType::eBufferOpaqueCaptureAddressCreateInfo : return "BufferOpaqueCaptureAddressCreateInfo";
+ case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo : return "MemoryOpaqueCaptureAddressAllocateInfo";
+ case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo : return "DeviceMemoryOpaqueCaptureAddressInfo";
case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR";
case StructureType::ePresentInfoKHR : return "PresentInfoKHR";
case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR";
@@ -5791,7 +7927,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT";
case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT";
case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT";
- case StructureType::ePhysicalDeviceShaderFloat16Int8FeaturesKHR : return "PhysicalDeviceShaderFloat16Int8FeaturesKHR";
case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR";
case StructureType::eObjectTableCreateInfoNVX : return "ObjectTableCreateInfoNVX";
case StructureType::eIndirectCommandsLayoutCreateInfoNVX : return "IndirectCommandsLayoutCreateInfoNVX";
@@ -5815,23 +7950,19 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT";
case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT";
case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT";
- case StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR : return "PhysicalDeviceImagelessFramebufferFeaturesKHR";
- case StructureType::eFramebufferAttachmentsCreateInfoKHR : return "FramebufferAttachmentsCreateInfoKHR";
- case StructureType::eFramebufferAttachmentImageInfoKHR : return "FramebufferAttachmentImageInfoKHR";
- case StructureType::eRenderPassAttachmentBeginInfoKHR : return "RenderPassAttachmentBeginInfoKHR";
- case StructureType::eAttachmentDescription2KHR : return "AttachmentDescription2KHR";
- case StructureType::eAttachmentReference2KHR : return "AttachmentReference2KHR";
- case StructureType::eSubpassDescription2KHR : return "SubpassDescription2KHR";
- case StructureType::eSubpassDependency2KHR : return "SubpassDependency2KHR";
- case StructureType::eRenderPassCreateInfo2KHR : return "RenderPassCreateInfo2KHR";
- case StructureType::eSubpassBeginInfoKHR : return "SubpassBeginInfoKHR";
- case StructureType::eSubpassEndInfoKHR : return "SubpassEndInfoKHR";
case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR";
case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR";
case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR";
case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR";
case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR";
case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR";
+ case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR : return "PhysicalDevicePerformanceQueryFeaturesKHR";
+ case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR : return "PhysicalDevicePerformanceQueryPropertiesKHR";
+ case StructureType::eQueryPoolPerformanceCreateInfoKHR : return "QueryPoolPerformanceCreateInfoKHR";
+ case StructureType::ePerformanceQuerySubmitInfoKHR : return "PerformanceQuerySubmitInfoKHR";
+ case StructureType::eAcquireProfilingLockInfoKHR : return "AcquireProfilingLockInfoKHR";
+ case StructureType::ePerformanceCounterKHR : return "PerformanceCounterKHR";
+ case StructureType::ePerformanceCounterDescriptionKHR : return "PerformanceCounterDescriptionKHR";
case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR";
case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR";
case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR";
@@ -5853,8 +7984,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID";
case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID";
case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID";
- case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT : return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT";
- case StructureType::eSamplerReductionModeCreateInfoEXT : return "SamplerReductionModeCreateInfoEXT";
case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT : return "PhysicalDeviceInlineUniformBlockFeaturesEXT";
case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT : return "PhysicalDeviceInlineUniformBlockPropertiesEXT";
case StructureType::eWriteDescriptorSetInlineUniformBlockEXT : return "WriteDescriptorSetInlineUniformBlockEXT";
@@ -5864,7 +7993,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT";
case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT";
case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT";
- case StructureType::eImageFormatListCreateInfoKHR : return "ImageFormatListCreateInfoKHR";
case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT";
@@ -5880,11 +8008,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT";
case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT";
case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT";
- case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT : return "DescriptorSetLayoutBindingFlagsCreateInfoEXT";
- case StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT : return "PhysicalDeviceDescriptorIndexingFeaturesEXT";
- case StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT : return "PhysicalDeviceDescriptorIndexingPropertiesEXT";
- case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT : return "DescriptorSetVariableDescriptorCountAllocateInfoEXT";
- case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT : return "DescriptorSetVariableDescriptorCountLayoutSupportEXT";
case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV";
case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV";
case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV";
@@ -5905,12 +8028,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT";
case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT";
case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT : return "DeviceQueueGlobalPriorityCreateInfoEXT";
- case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR : return "PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR";
- case StructureType::ePhysicalDevice8BitStorageFeaturesKHR : return "PhysicalDevice8BitStorageFeaturesKHR";
case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT";
case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT";
case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
- case StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR : return "PhysicalDeviceShaderAtomicInt64FeaturesKHR";
case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR";
case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD";
case StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT";
@@ -5921,10 +8041,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP";
case StructureType::ePipelineCreationFeedbackCreateInfoEXT : return "PipelineCreationFeedbackCreateInfoEXT";
- case StructureType::ePhysicalDeviceDriverPropertiesKHR : return "PhysicalDeviceDriverPropertiesKHR";
- case StructureType::ePhysicalDeviceFloatControlsPropertiesKHR : return "PhysicalDeviceFloatControlsPropertiesKHR";
- case StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR : return "PhysicalDeviceDepthStencilResolvePropertiesKHR";
- case StructureType::eSubpassDescriptionDepthStencilResolveKHR : return "SubpassDescriptionDepthStencilResolveKHR";
case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV";
case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV";
@@ -5934,12 +8050,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV";
case StructureType::eCheckpointDataNV : return "CheckpointDataNV";
case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV";
- case StructureType::ePhysicalDeviceTimelineSemaphoreFeaturesKHR : return "PhysicalDeviceTimelineSemaphoreFeaturesKHR";
- case StructureType::ePhysicalDeviceTimelineSemaphorePropertiesKHR : return "PhysicalDeviceTimelineSemaphorePropertiesKHR";
- case StructureType::eSemaphoreTypeCreateInfoKHR : return "SemaphoreTypeCreateInfoKHR";
- case StructureType::eTimelineSemaphoreSubmitInfoKHR : return "TimelineSemaphoreSubmitInfoKHR";
- case StructureType::eSemaphoreWaitInfoKHR : return "SemaphoreWaitInfoKHR";
- case StructureType::eSemaphoreSignalInfoKHR : return "SemaphoreSignalInfoKHR";
case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
case StructureType::eQueryPoolCreateInfoINTEL : return "QueryPoolCreateInfoINTEL";
case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL";
@@ -5947,7 +8057,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL";
case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL";
case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL";
- case StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR : return "PhysicalDeviceVulkanMemoryModelFeaturesKHR";
case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT";
case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD";
case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return "SwapchainDisplayNativeHdrCreateInfoAMD";
@@ -5956,7 +8065,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT";
- case StructureType::ePhysicalDeviceScalarBlockLayoutFeaturesEXT : return "PhysicalDeviceScalarBlockLayoutFeaturesEXT";
case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT : return "PhysicalDeviceSubgroupSizeControlPropertiesEXT";
case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT";
case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT";
@@ -5967,13 +8075,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT";
case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR";
case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
- case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR : return "PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR";
- case StructureType::eAttachmentReferenceStencilLayoutKHR : return "AttachmentReferenceStencilLayoutKHR";
- case StructureType::eAttachmentDescriptionStencilLayoutKHR : return "AttachmentDescriptionStencilLayoutKHR";
case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
- case StructureType::eBufferDeviceAddressInfoEXT : return "BufferDeviceAddressInfoEXT";
case StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT";
- case StructureType::eImageStencilUsageCreateInfoEXT : return "ImageStencilUsageCreateInfoEXT";
+ case StructureType::ePhysicalDeviceToolPropertiesEXT : return "PhysicalDeviceToolPropertiesEXT";
case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT";
case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV";
case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV";
@@ -5983,7 +8087,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV";
case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
- case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR : return "PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR";
case StructureType::eSurfaceFullScreenExclusiveInfoEXT : return "SurfaceFullScreenExclusiveInfoEXT";
case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT";
case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT";
@@ -5991,7 +8094,6 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT";
case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT";
case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT";
- case StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT : return "PhysicalDeviceHostQueryResetFeaturesEXT";
case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR";
@@ -6006,6 +8108,36 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class SubgroupFeatureFlagBits
+ {
+ eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+ eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+ eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+ eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+ eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+ eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+ eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+ eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+ ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
+ {
+ switch ( value )
+ {
+ case SubgroupFeatureFlagBits::eBasic : return "Basic";
+ case SubgroupFeatureFlagBits::eVote : return "Vote";
+ case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic";
+ case SubgroupFeatureFlagBits::eBallot : return "Ballot";
+ case SubgroupFeatureFlagBits::eShuffle : return "Shuffle";
+ case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative";
+ case SubgroupFeatureFlagBits::eClustered : return "Clustered";
+ case SubgroupFeatureFlagBits::eQuad : return "Quad";
+ case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV";
+ default: return "invalid";
+ }
+ }
+
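A quick usage sketch for the new stringizer (default vk namespace assumed; the bare bit name comes straight from the switch above):

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    int main()
    {
        // Prints "Ballot" per the case labels above; unhandled values yield "invalid".
        std::cout << vk::to_string( vk::SubgroupFeatureFlagBits::eBallot ) << std::endl;
    }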
enum class SubpassContents
{
eInline = VK_SUBPASS_CONTENTS_INLINE,
@@ -6022,6 +8154,84 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class SubpassDescriptionFlagBits
+ {
+ ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+ ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
+ {
+ switch ( value )
+ {
+ case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX";
+ case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX";
+ default: return "invalid";
+ }
+ }
+
+ enum class SurfaceCounterFlagBitsEXT
+ {
+ eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank";
+ default: return "invalid";
+ }
+ }
+
+ enum class SurfaceTransformFlagBitsKHR
+ {
+ eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+ eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+ eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+ eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+ eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+ eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+ eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+ eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+ eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity";
+ case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90";
+ case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180";
+ case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180";
+ case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270";
+ case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit";
+ default: return "invalid";
+ }
+ }
+
+ enum class SwapchainCreateFlagBitsKHR
+ {
+ eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+ eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
+ eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
+ {
+ switch ( value )
+ {
+ case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
+ case SwapchainCreateFlagBitsKHR::eProtected : return "Protected";
+ case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat";
+ default: return "invalid";
+ }
+ }
+
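Individual bits combine into the matching Flags type through the operator| overloads vulkan.hpp generates per flag type; a hedged sketch, assuming SwapchainCreateFlagsKHR is defined alongside these bits as usual:

    // Combine create flags for a swapchain; both spellings come from the enum above.
    vk::SwapchainCreateFlagsKHR flags = vk::SwapchainCreateFlagBitsKHR::eMutableFormat
                                      | vk::SwapchainCreateFlagBitsKHR::eProtected;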
enum class SystemAllocationScope
{
eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
@@ -6047,10 +8257,9 @@ namespace VULKAN_HPP_NAMESPACE
enum class TessellationDomainOrigin
{
eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
- eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
- eUpperLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR,
- eLowerLeftKHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR
+ eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT
};
+ using TessellationDomainOriginKHR = TessellationDomainOrigin;
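Rather than duplicating KHR enumerators inside the enum, the promoted type now carries a plain alias, so extension-era spellings keep compiling; a small sketch:

    #include <vulkan/vulkan.hpp>
    #include <type_traits>

    // Same type, two names: code written against VK_KHR_maintenance2 still builds.
    static_assert( std::is_same<vk::TessellationDomainOriginKHR,
                                vk::TessellationDomainOrigin>::value,
                   "KHR alias of the core enum" );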
VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value )
{
@@ -6082,6 +8291,32 @@ namespace VULKAN_HPP_NAMESPACE
}
}
+ enum class ToolPurposeFlagBitsEXT
+ {
+ eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT,
+ eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT,
+ eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT,
+ eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT,
+ eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT,
+ eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
+ eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT
+ };
+
+ VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value )
+ {
+ switch ( value )
+ {
+ case ToolPurposeFlagBitsEXT::eValidation : return "Validation";
+ case ToolPurposeFlagBitsEXT::eProfiling : return "Profiling";
+ case ToolPurposeFlagBitsEXT::eTracing : return "Tracing";
+ case ToolPurposeFlagBitsEXT::eAdditionalFeatures : return "AdditionalFeatures";
+ case ToolPurposeFlagBitsEXT::eModifyingFeatures : return "ModifyingFeatures";
+ case ToolPurposeFlagBitsEXT::eDebugReporting : return "DebugReporting";
+ case ToolPurposeFlagBitsEXT::eDebugMarkers : return "DebugMarkers";
+ default: return "invalid";
+ }
+ }
+
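These purpose bits back the new VK_EXT_tooling_info query; a hedged sketch, assuming this header also wraps vkGetPhysicalDeviceToolPropertiesEXT in the usual enhanced-mode style (the wrapper name and return shape below follow that convention and are not confirmed by this hunk):

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    void listTools( vk::PhysicalDevice gpu )
    {
        // Each entry names an attached tool (validation layer, profiler, ...) and
        // its purposes, printable with the flags stringizer.
        for ( auto const & tool : gpu.getToolPropertiesEXT() )
            std::cout << tool.name << ": " << vk::to_string( tool.purposes ) << "\n";
    }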
enum class ValidationCacheHeaderVersionEXT
{
eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
@@ -6223,74 +8458,6 @@ namespace VULKAN_HPP_NAMESPACE
{
};
- enum class AccessFlagBits
- {
- eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
- eIndexRead = VK_ACCESS_INDEX_READ_BIT,
- eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
- eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
- eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
- eShaderRead = VK_ACCESS_SHADER_READ_BIT,
- eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
- eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
- eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
- eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
- eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
- eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
- eHostRead = VK_ACCESS_HOST_READ_BIT,
- eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
- eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
- eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
- eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
- eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
- eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
- eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
- eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
- eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX,
- eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
- eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
- eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
- eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
- eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
- {
- switch ( value )
- {
- case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead";
- case AccessFlagBits::eIndexRead : return "IndexRead";
- case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead";
- case AccessFlagBits::eUniformRead : return "UniformRead";
- case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead";
- case AccessFlagBits::eShaderRead : return "ShaderRead";
- case AccessFlagBits::eShaderWrite : return "ShaderWrite";
- case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead";
- case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite";
- case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead";
- case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite";
- case AccessFlagBits::eTransferRead : return "TransferRead";
- case AccessFlagBits::eTransferWrite : return "TransferWrite";
- case AccessFlagBits::eHostRead : return "HostRead";
- case AccessFlagBits::eHostWrite : return "HostWrite";
- case AccessFlagBits::eMemoryRead : return "MemoryRead";
- case AccessFlagBits::eMemoryWrite : return "MemoryWrite";
- case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT";
- case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT";
- case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT";
- case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT";
- case AccessFlagBits::eCommandProcessReadNVX : return "CommandProcessReadNVX";
- case AccessFlagBits::eCommandProcessWriteNVX : return "CommandProcessWriteNVX";
- case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT";
- case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV";
- case AccessFlagBits::eAccelerationStructureReadNV : return "AccelerationStructureReadNV";
- case AccessFlagBits::eAccelerationStructureWriteNV : return "AccelerationStructureWriteNV";
- case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
- default: return "invalid";
- }
- }
-
using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
template <> struct FlagTraits<AccessFlagBits>
@@ -6321,16 +8488,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( AccessFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccessFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AccessFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( AccessFlags value )
{
if ( !value ) return "{}";
@@ -6367,6 +8524,13 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
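to_string over a Flags value tests each bit and joins the names of the set ones; a quick sketch with AccessFlags (default vk namespace assumed):

    #include <vulkan/vulkan.hpp>
    #include <iostream>

    int main()
    {
        vk::AccessFlags access = vk::AccessFlagBits::eShaderRead
                               | vk::AccessFlagBits::eShaderWrite;
        // Expected to print "{ ShaderRead | ShaderWrite }" given the formatter above.
        std::cout << vk::to_string( access ) << std::endl;
    }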
+ using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR, VkAcquireProfilingLockFlagsKHR>;
+
+ VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
+ {
+ return "{}";
+ }
+
#ifdef VK_USE_PLATFORM_ANDROID_KHR
enum class AndroidSurfaceCreateFlagBitsKHR
{};
@@ -6384,20 +8548,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- enum class AttachmentDescriptionFlagBits
- {
- eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
- {
- switch ( value )
- {
- case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias";
- default: return "invalid";
- }
- }
-
using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
template <> struct FlagTraits<AttachmentDescriptionFlagBits>
@@ -6428,16 +8578,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( AttachmentDescriptionFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AttachmentDescriptionFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return AttachmentDescriptionFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value )
{
if ( !value ) return "{}";
@@ -6447,35 +8587,13 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class BufferCreateFlagBits
- {
- eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
- eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
- eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
- eProtected = VK_BUFFER_CREATE_PROTECTED_BIT,
- eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
- {
- switch ( value )
- {
- case BufferCreateFlagBits::eSparseBinding : return "SparseBinding";
- case BufferCreateFlagBits::eSparseResidency : return "SparseResidency";
- case BufferCreateFlagBits::eSparseAliased : return "SparseAliased";
- case BufferCreateFlagBits::eProtected : return "Protected";
- case BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT : return "DeviceAddressCaptureReplayEXT";
- default: return "invalid";
- }
- }
-
using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
template <> struct FlagTraits<BufferCreateFlagBits>
{
enum
{
- allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT)
+ allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplay)
};
};
@@ -6499,16 +8617,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( BufferCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
{
if ( !value ) return "{}";
@@ -6518,57 +8626,17 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | ";
- if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT ) result += "DeviceAddressCaptureReplayEXT | ";
+ if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
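FlagTraits<...>::allFlags is what keeps operator~ well defined: the complement is masked to bits the API actually declares (note the EXT suffix drop above, matching the 1.2 promotion of buffer device address). A short sketch:

    // Everything except protected buffers; the complement stays within allFlags,
    // so no undefined bits leak into the mask.
    vk::BufferCreateFlags notProtected = ~vk::BufferCreateFlagBits::eProtected;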
- enum class BufferUsageFlagBits
- {
- eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
- eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
- eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
- eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
- eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
- eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
- eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
- eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
- eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
- eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
- eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
- eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
- eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
- eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
- {
- switch ( value )
- {
- case BufferUsageFlagBits::eTransferSrc : return "TransferSrc";
- case BufferUsageFlagBits::eTransferDst : return "TransferDst";
- case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
- case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
- case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer";
- case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer";
- case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer";
- case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer";
- case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer";
- case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT";
- case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT";
- case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
- case BufferUsageFlagBits::eRayTracingNV : return "RayTracingNV";
- case BufferUsageFlagBits::eShaderDeviceAddressEXT : return "ShaderDeviceAddressEXT";
- default: return "invalid";
- }
- }
-
using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
template <> struct FlagTraits<BufferUsageFlagBits>
{
enum
{
- allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddressEXT)
+ allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV)
};
};
@@ -6592,16 +8660,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( BufferUsageFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferUsageFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BufferUsageFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
{
if ( !value ) return "{}";
@@ -6616,22 +8674,14 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | ";
if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | ";
+ if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) result += "ShaderDeviceAddress | ";
if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | ";
if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | ";
if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
if ( value & BufferUsageFlagBits::eRayTracingNV ) result += "RayTracingNV | ";
- if ( value & BufferUsageFlagBits::eShaderDeviceAddressEXT ) result += "ShaderDeviceAddressEXT | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
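
The BufferUsageFlagBits hunks move eShaderDeviceAddress into the core, unsuffixed position (VK_KHR_buffer_device_address was promoted to core in Vulkan 1.2) while the EXT/NV bits stay at the end. A hedged usage sketch of the updated names; it assumes the regenerated <vulkan/vulkan.hpp> is on the include path and calls nothing that needs a live Vulkan device:

    #include <iostream>
    #include <vulkan/vulkan.hpp>  // the header regenerated by this diff

    int main()
    {
      // Combine usage bits with the generated operator|; eShaderDeviceAddress is the
      // post-promotion core spelling of the old eShaderDeviceAddressEXT.
      vk::BufferUsageFlags usage = vk::BufferUsageFlagBits::eStorageBuffer
                                 | vk::BufferUsageFlagBits::eShaderDeviceAddress;
      std::cout << vk::to_string( usage ) << std::endl;  // { StorageBuffer | ShaderDeviceAddress }
    }
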
- enum class BufferViewCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
- {
- return "(void)";
- }
-
using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
@@ -6639,28 +8689,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class BuildAccelerationStructureFlagBitsNV
- {
- eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV,
- eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV,
- ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV,
- ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV,
- eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsNV value )
- {
- switch ( value )
- {
- case BuildAccelerationStructureFlagBitsNV::eAllowUpdate : return "AllowUpdate";
- case BuildAccelerationStructureFlagBitsNV::eAllowCompaction : return "AllowCompaction";
- case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace : return "PreferFastTrace";
- case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild : return "PreferFastBuild";
- case BuildAccelerationStructureFlagBitsNV::eLowMemory : return "LowMemory";
- default: return "invalid";
- }
- }
-
using BuildAccelerationStructureFlagsNV = Flags<BuildAccelerationStructureFlagBitsNV, VkBuildAccelerationStructureFlagsNV>;
template <> struct FlagTraits<BuildAccelerationStructureFlagBitsNV>
@@ -6691,16 +8719,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( BuildAccelerationStructureFlagsNV( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BuildAccelerationStructureFlagsNV( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return BuildAccelerationStructureFlagsNV( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsNV value )
{
if ( !value ) return "{}";
@@ -6714,26 +8732,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
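
Each hunk in this run also deletes the generated operator== / operator!= overloads that compared two FlagBits values by converting one side to Flags. Those overloads were redundant, and under C++20's rewritten-comparison rules likely ambiguous as well: a scoped enum already has built-in equality on its underlying value, so the result is unchanged. A two-assertion standalone check with a hypothetical enum:

    // Built-in comparison on scoped enums covers what the deleted overloads did.
    enum class Bits : unsigned { eA = 1, eB = 2 };

    static_assert( Bits::eA == Bits::eA, "built-in == works on scoped enums" );
    static_assert( Bits::eA != Bits::eB, "built-in != works on scoped enums" );

    int main() {}
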
- enum class ColorComponentFlagBits
- {
- eR = VK_COLOR_COMPONENT_R_BIT,
- eG = VK_COLOR_COMPONENT_G_BIT,
- eB = VK_COLOR_COMPONENT_B_BIT,
- eA = VK_COLOR_COMPONENT_A_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
- {
- switch ( value )
- {
- case ColorComponentFlagBits::eR : return "R";
- case ColorComponentFlagBits::eG : return "G";
- case ColorComponentFlagBits::eB : return "B";
- case ColorComponentFlagBits::eA : return "A";
- default: return "invalid";
- }
- }
-
using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
template <> struct FlagTraits<ColorComponentFlagBits>
@@ -6764,16 +8762,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ColorComponentFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ColorComponentFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ColorComponentFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
{
if ( !value ) return "{}";
@@ -6786,20 +8774,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class CommandBufferResetFlagBits
- {
- eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
- {
- switch ( value )
- {
- case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources";
- default: return "invalid";
- }
- }
-
using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
template <> struct FlagTraits<CommandBufferResetFlagBits>
@@ -6830,16 +8804,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( CommandBufferResetFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferResetFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferResetFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value )
{
if ( !value ) return "{}";
@@ -6849,24 +8813,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class CommandBufferUsageFlagBits
- {
- eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
- eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
- eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
- {
- switch ( value )
- {
- case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit";
- case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue";
- case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse";
- default: return "invalid";
- }
- }
-
using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
template <> struct FlagTraits<CommandBufferUsageFlagBits>
@@ -6897,16 +8843,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( CommandBufferUsageFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferUsageFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandBufferUsageFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value )
{
if ( !value ) return "{}";
@@ -6918,24 +8854,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class CommandPoolCreateFlagBits
- {
- eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
- eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
- eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
- {
- switch ( value )
- {
- case CommandPoolCreateFlagBits::eTransient : return "Transient";
- case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer";
- case CommandPoolCreateFlagBits::eProtected : return "Protected";
- default: return "invalid";
- }
- }
-
using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
template <> struct FlagTraits<CommandPoolCreateFlagBits>
@@ -6966,16 +8884,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( CommandPoolCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value )
{
if ( !value ) return "{}";
@@ -6987,20 +8895,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class CommandPoolResetFlagBits
- {
- eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
- {
- switch ( value )
- {
- case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources";
- default: return "invalid";
- }
- }
-
using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
template <> struct FlagTraits<CommandPoolResetFlagBits>
@@ -7031,16 +8925,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( CommandPoolResetFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolResetFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CommandPoolResetFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value )
{
if ( !value ) return "{}";
@@ -7067,26 +8951,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class CompositeAlphaFlagBitsKHR
- {
- eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
- ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
- ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
- eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
- {
- switch ( value )
- {
- case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque";
- case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied";
- case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied";
- case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit";
- default: return "invalid";
- }
- }
-
using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
@@ -7117,16 +8981,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( CompositeAlphaFlagsKHR( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CompositeAlphaFlagsKHR( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CompositeAlphaFlagsKHR( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value )
{
if ( !value ) return "{}";
@@ -7139,20 +8993,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ConditionalRenderingFlagBitsEXT
- {
- eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
- {
- switch ( value )
- {
- case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted";
- default: return "invalid";
- }
- }
-
using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT, VkConditionalRenderingFlagsEXT>;
template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
@@ -7183,16 +9023,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ConditionalRenderingFlagsEXT( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ConditionalRenderingFlagsEXT( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ConditionalRenderingFlagsEXT( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value )
{
if ( !value ) return "{}";
@@ -7202,26 +9032,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class CullModeFlagBits
- {
- eNone = VK_CULL_MODE_NONE,
- eFront = VK_CULL_MODE_FRONT_BIT,
- eBack = VK_CULL_MODE_BACK_BIT,
- eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
- };
-
- VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
- {
- switch ( value )
- {
- case CullModeFlagBits::eNone : return "None";
- case CullModeFlagBits::eFront : return "Front";
- case CullModeFlagBits::eBack : return "Back";
- case CullModeFlagBits::eFrontAndBack : return "FrontAndBack";
- default: return "invalid";
- }
- }
-
using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
template <> struct FlagTraits<CullModeFlagBits>
@@ -7252,16 +9062,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( CullModeFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CullModeFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return CullModeFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
{
if ( !value ) return "{}";
@@ -7272,28 +9072,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DebugReportFlagBitsEXT
- {
- eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
- eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
- ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
- eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
- {
- switch ( value )
- {
- case DebugReportFlagBitsEXT::eInformation : return "Information";
- case DebugReportFlagBitsEXT::eWarning : return "Warning";
- case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning";
- case DebugReportFlagBitsEXT::eError : return "Error";
- case DebugReportFlagBitsEXT::eDebug : return "Debug";
- default: return "invalid";
- }
- }
-
using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
template <> struct FlagTraits<DebugReportFlagBitsEXT>
@@ -7324,16 +9102,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DebugReportFlagsEXT( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugReportFlagsEXT( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugReportFlagsEXT( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value )
{
if ( !value ) return "{}";
@@ -7347,26 +9115,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DebugUtilsMessageSeverityFlagBitsEXT
- {
- eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
- eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
- eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
- eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
- {
- switch ( value )
- {
- case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose";
- case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info";
- case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning";
- case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error";
- default: return "invalid";
- }
- }
-
using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT, VkDebugUtilsMessageSeverityFlagsEXT>;
template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
@@ -7397,16 +9145,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageSeverityFlagsEXT( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageSeverityFlagsEXT( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value )
{
if ( !value ) return "{}";
@@ -7419,24 +9157,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DebugUtilsMessageTypeFlagBitsEXT
- {
- eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
- eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
- ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
- {
- switch ( value )
- {
- case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General";
- case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation";
- case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance";
- default: return "invalid";
- }
- }
-
using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT, VkDebugUtilsMessageTypeFlagsEXT>;
template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
@@ -7467,16 +9187,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageTypeFlagsEXT( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DebugUtilsMessageTypeFlagsEXT( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
{
if ( !value ) return "{}";
@@ -7518,26 +9228,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class DependencyFlagBits
- {
- eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
- eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
- eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
- eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR,
- eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
- {
- switch ( value )
- {
- case DependencyFlagBits::eByRegion : return "ByRegion";
- case DependencyFlagBits::eDeviceGroup : return "DeviceGroup";
- case DependencyFlagBits::eViewLocal : return "ViewLocal";
- default: return "invalid";
- }
- }
-
using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
template <> struct FlagTraits<DependencyFlagBits>
@@ -7568,16 +9258,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DependencyFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DependencyFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DependencyFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
{
if ( !value ) return "{}";
@@ -7589,101 +9269,57 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DescriptorBindingFlagBitsEXT
- {
- eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT,
- eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT,
- ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT,
- eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT
- };
+ using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits, VkDescriptorBindingFlags>;
- VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBitsEXT value )
- {
- switch ( value )
- {
- case DescriptorBindingFlagBitsEXT::eUpdateAfterBind : return "UpdateAfterBind";
- case DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending";
- case DescriptorBindingFlagBitsEXT::ePartiallyBound : return "PartiallyBound";
- case DescriptorBindingFlagBitsEXT::eVariableDescriptorCount : return "VariableDescriptorCount";
- default: return "invalid";
- }
- }
-
- using DescriptorBindingFlagsEXT = Flags<DescriptorBindingFlagBitsEXT, VkDescriptorBindingFlagsEXT>;
-
- template <> struct FlagTraits<DescriptorBindingFlagBitsEXT>
+ template <> struct FlagTraits<DescriptorBindingFlagBits>
{
enum
{
- allFlags = VkFlags(DescriptorBindingFlagBitsEXT::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBitsEXT::ePartiallyBound) | VkFlags(DescriptorBindingFlagBitsEXT::eVariableDescriptorCount)
+ allFlags = VkFlags(DescriptorBindingFlagBits::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBits::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBits::ePartiallyBound) | VkFlags(DescriptorBindingFlagBits::eVariableDescriptorCount)
};
};
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator|( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return DescriptorBindingFlagsEXT( bit0 ) | bit1;
+ return DescriptorBindingFlags( bit0 ) | bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator&( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return DescriptorBindingFlagsEXT( bit0 ) & bit1;
+ return DescriptorBindingFlags( bit0 ) & bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator^( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return DescriptorBindingFlagsEXT( bit0 ) ^ bit1;
+ return DescriptorBindingFlags( bit0 ) ^ bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator~( DescriptorBindingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
- return ~( DescriptorBindingFlagsEXT( bits ) );
+ return ~( DescriptorBindingFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorBindingFlagsEXT( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorBindingFlagsEXT( bit0 ) != bit1;
- }
+ using DescriptorBindingFlagsEXT = DescriptorBindingFlags;
- VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagsEXT value )
+ VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value )
{
if ( !value ) return "{}";
std::string result;
- if ( value & DescriptorBindingFlagBitsEXT::eUpdateAfterBind ) result += "UpdateAfterBind | ";
- if ( value & DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | ";
- if ( value & DescriptorBindingFlagBitsEXT::ePartiallyBound ) result += "PartiallyBound | ";
- if ( value & DescriptorBindingFlagBitsEXT::eVariableDescriptorCount ) result += "VariableDescriptorCount | ";
+ if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
+ if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | ";
+ if ( value & DescriptorBindingFlagBits::ePartiallyBound ) result += "PartiallyBound | ";
+ if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) result += "VariableDescriptorCount | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
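
The DescriptorBindingFlagBitsEXT hunk above shows the full promotion pattern in one place: the type is renamed to its core spelling (VK_EXT_descriptor_indexing was promoted in Vulkan 1.2) and the old EXT name survives as an alias, using DescriptorBindingFlagsEXT = DescriptorBindingFlags;, so code written against the extension keeps compiling. A minimal sketch of why a plain alias is enough; the Demo names are hypothetical:

    // The core type is canonical; the EXT spelling is a pure alias, not a distinct type.
    struct DescriptorBindingFlagsDemo { unsigned mask = 0; };
    using DescriptorBindingFlagsEXTDemo = DescriptorBindingFlagsDemo;

    // Pre-promotion code that names the EXT type still matches the core signature.
    void bind( DescriptorBindingFlagsDemo ) {}

    int main()
    {
      DescriptorBindingFlagsEXTDemo legacy{};
      bind( legacy );  // same type, so no conversion is even needed
    }
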
- enum class DescriptorPoolCreateFlagBits
- {
- eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
- eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
- {
- switch ( value )
- {
- case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet";
- case DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT : return "UpdateAfterBindEXT";
- default: return "invalid";
- }
- }
-
using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
{
enum
{
- allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT)
+ allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBind)
};
};
@@ -7707,23 +9343,13 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DescriptorPoolCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorPoolCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorPoolCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
{
if ( !value ) return "{}";
std::string result;
if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | ";
- if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT ) result += "UpdateAfterBindEXT | ";
+ if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
@@ -7742,29 +9368,13 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class DescriptorSetLayoutCreateFlagBits
- {
- ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
- eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
- {
- switch ( value )
- {
- case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR";
- case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT : return "UpdateAfterBindPoolEXT";
- default: return "invalid";
- }
- }
-
using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
{
enum
{
- allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) | VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT)
+ allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool) | VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR)
};
};
@@ -7788,23 +9398,13 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DescriptorSetLayoutCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorSetLayoutCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DescriptorSetLayoutCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
{
if ( !value ) return "{}";
std::string result;
+ if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) result += "UpdateAfterBindPool | ";
if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | ";
- if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT ) result += "UpdateAfterBindPoolEXT | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
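
The DescriptorSetLayoutCreateFlagBits hunks reorder the promoted core bit (eUpdateAfterBindPool) ahead of the KHR bit in both allFlags and the string builder. For allFlags the reorder is purely cosmetic, since bitwise OR is commutative; only the order of names in the to_string output changes:

    // OR is commutative, so reordering the allFlags operands yields the same mask.
    constexpr unsigned pushDescriptorKHR   = 0x1;  // hypothetical bit values
    constexpr unsigned updateAfterBindPool = 0x2;

    static_assert( ( updateAfterBindPool | pushDescriptorKHR )
                == ( pushDescriptorKHR | updateAfterBindPool ), "same allFlags mask" );

    int main() {}
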
@@ -7825,14 +9425,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class DeviceCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
- {
- return "(void)";
- }
-
using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
@@ -7840,26 +9432,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class DeviceGroupPresentModeFlagBitsKHR
- {
- eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
- eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
- eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
- eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
- {
- switch ( value )
- {
- case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local";
- case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote";
- case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum";
- case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice";
- default: return "invalid";
- }
- }
-
using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR, VkDeviceGroupPresentModeFlagsKHR>;
template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
@@ -7890,16 +9462,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceGroupPresentModeFlagsKHR( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceGroupPresentModeFlagsKHR( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
{
if ( !value ) return "{}";
@@ -7912,20 +9474,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class DeviceQueueCreateFlagBits
- {
- eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
- {
- switch ( value )
- {
- case DeviceQueueCreateFlagBits::eProtected : return "Protected";
- default: return "invalid";
- }
- }
-
using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
template <> struct FlagTraits<DeviceQueueCreateFlagBits>
@@ -7956,16 +9504,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DeviceQueueCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceQueueCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DeviceQueueCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
{
if ( !value ) return "{}";
@@ -7990,26 +9528,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class DisplayPlaneAlphaFlagBitsKHR
- {
- eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
- eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
- ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
- ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
- {
- switch ( value )
- {
- case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque";
- case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global";
- case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel";
- case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied";
- default: return "invalid";
- }
- }
-
using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
@@ -8040,16 +9558,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DisplayPlaneAlphaFlagsKHR( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return DisplayPlaneAlphaFlagsKHR( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
{
if ( !value ) return "{}";
@@ -8092,24 +9600,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class ExternalFenceFeatureFlagBits
- {
- eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
- eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT,
- eExportableKHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR,
- eImportableKHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalFenceFeatureFlagBits::eExportable : return "Exportable";
- case ExternalFenceFeatureFlagBits::eImportable : return "Importable";
- default: return "invalid";
- }
- }
-
using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits, VkExternalFenceFeatureFlags>;
template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
@@ -8140,16 +9630,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ExternalFenceFeatureFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceFeatureFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceFeatureFlags( bit0 ) != bit1;
- }
-
using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
@@ -8162,30 +9642,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ExternalFenceHandleTypeFlagBits
- {
- eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
- eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
- eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
- eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
- eOpaqueFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
- eOpaqueWin32KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
- eOpaqueWin32KmtKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
- eSyncFdKHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
- case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
- case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
- case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd";
- default: return "invalid";
- }
- }
-
using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits, VkExternalFenceHandleTypeFlags>;
template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
@@ -8216,16 +9672,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ExternalFenceHandleTypeFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceHandleTypeFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalFenceHandleTypeFlags( bit0 ) != bit1;
- }
-
using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
@@ -8240,27 +9686,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ExternalMemoryFeatureFlagBits
- {
- eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
- eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
- eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT,
- eDedicatedOnlyKHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR,
- eExportableKHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR,
- eImportableKHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly";
- case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable";
- case ExternalMemoryFeatureFlagBits::eImportable : return "Importable";
- default: return "invalid";
- }
- }
-
using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits, VkExternalMemoryFeatureFlags>;
template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
@@ -8291,16 +9716,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ExternalMemoryFeatureFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlags( bit0 ) != bit1;
- }
-
using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
@@ -8314,24 +9729,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ExternalMemoryFeatureFlagBitsNV
- {
- eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
- eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
- eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
- {
- switch ( value )
- {
- case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly";
- case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable";
- case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable";
- default: return "invalid";
- }
- }
-
using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
@@ -8362,16 +9759,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ExternalMemoryFeatureFlagsNV( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlagsNV( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryFeatureFlagsNV( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
{
if ( !value ) return "{}";
@@ -8383,47 +9770,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ExternalMemoryHandleTypeFlagBits
- {
- eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
- eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
- eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
- eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
- eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
- eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
- eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
- eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
- eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
- eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
- eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT,
- eOpaqueFdKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
- eOpaqueWin32KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
- eOpaqueWin32KmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
- eD3D11TextureKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR,
- eD3D11TextureKmtKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR,
- eD3D12HeapKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR,
- eD3D12ResourceKHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
- case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
- case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
- case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture";
- case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt";
- case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap";
- case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource";
- case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT";
- case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID";
- case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT";
- case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT";
- default: return "invalid";
- }
- }
-
using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits, VkExternalMemoryHandleTypeFlags>;
template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
@@ -8454,16 +9800,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ExternalMemoryHandleTypeFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlags( bit0 ) != bit1;
- }
-
using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
@@ -8485,26 +9821,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ExternalMemoryHandleTypeFlagBitsNV
- {
- eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
- eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
- eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
- eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
- {
- switch ( value )
- {
- case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32";
- case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
- case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image";
- case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt";
- default: return "invalid";
- }
- }
-
using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
@@ -8535,16 +9851,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlagsNV( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalMemoryHandleTypeFlagsNV( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
{
if ( !value ) return "{}";
@@ -8557,24 +9863,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ExternalSemaphoreFeatureFlagBits
- {
- eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
- eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT,
- eExportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR,
- eImportableKHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
- {
- switch ( value )
- {
- case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable";
- case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable";
- default: return "invalid";
- }
- }
-
using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits, VkExternalSemaphoreFeatureFlags>;
template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
@@ -8605,16 +9893,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ExternalSemaphoreFeatureFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreFeatureFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreFeatureFlags( bit0 ) != bit1;
- }
-
using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
@@ -8627,33 +9905,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class ExternalSemaphoreHandleTypeFlagBits
- {
- eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
- eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
- eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
- eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
- eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
- eOpaqueFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
- eOpaqueWin32KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
- eOpaqueWin32KmtKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR,
- eD3D12FenceKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR,
- eSyncFdKHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
- {
- switch ( value )
- {
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd";
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32";
- case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt";
- case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence";
- case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd";
- default: return "invalid";
- }
- }
-
using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits, VkExternalSemaphoreHandleTypeFlags>;
template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
@@ -8684,16 +9935,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreHandleTypeFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ExternalSemaphoreHandleTypeFlags( bit0 ) != bit1;
- }
-
using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
@@ -8709,20 +9950,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class FenceCreateFlagBits
- {
- eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
- {
- switch ( value )
- {
- case FenceCreateFlagBits::eSignaled : return "Signaled";
- default: return "invalid";
- }
- }
-
using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
template <> struct FlagTraits<FenceCreateFlagBits>
@@ -8753,16 +9980,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( FenceCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
{
if ( !value ) return "{}";
@@ -8772,21 +9989,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class FenceImportFlagBits
- {
- eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT,
- eTemporaryKHR = VK_FENCE_IMPORT_TEMPORARY_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
- {
- switch ( value )
- {
- case FenceImportFlagBits::eTemporary : return "Temporary";
- default: return "invalid";
- }
- }
-
using FenceImportFlags = Flags<FenceImportFlagBits, VkFenceImportFlags>;
template <> struct FlagTraits<FenceImportFlagBits>
@@ -8817,16 +10019,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( FenceImportFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceImportFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FenceImportFlags( bit0 ) != bit1;
- }
-
using FenceImportFlagsKHR = FenceImportFlags;
VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
@@ -8838,85 +10030,13 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class FormatFeatureFlagBits
- {
- eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
- eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
- eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
- eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
- eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
- eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
- eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
- eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
- eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
- eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
- eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
- eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
- eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
- eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
- eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
- eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
- eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
- eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
- eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
- eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
- eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
- eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
- eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
- eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
- eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
- eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR,
- eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
- eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
- eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
- eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
- eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
- eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
- eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
- eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
- eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
- {
- switch ( value )
- {
- case FormatFeatureFlagBits::eSampledImage : return "SampledImage";
- case FormatFeatureFlagBits::eStorageImage : return "StorageImage";
- case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic";
- case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
- case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
- case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic";
- case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer";
- case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment";
- case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend";
- case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
- case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc";
- case FormatFeatureFlagBits::eBlitDst : return "BlitDst";
- case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear";
- case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc";
- case FormatFeatureFlagBits::eTransferDst : return "TransferDst";
- case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples";
- case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter";
- case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter";
- case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit";
- case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
- case FormatFeatureFlagBits::eDisjoint : return "Disjoint";
- case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples";
- case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG";
- case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT : return "SampledImageFilterMinmaxEXT";
- case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
- default: return "invalid";
- }
- }
-
using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
template <> struct FlagTraits<FormatFeatureFlagBits>
{
enum
{
- allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT)
+ allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eTransferSrc) | VkFlags(FormatFeatureFlagBits::eTransferDst) | VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) | VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) | VkFlags(FormatFeatureFlagBits::eDisjoint) | VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT)
};
};
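
The rebuilt `allFlags` drops the `EXT` suffix from the sampler filter-minmax bit (VK_EXT_sampler_filter_minmax promoted to Vulkan 1.2 core), keeping the mask in sync with the enum. That matters because the complement operator clamps against it; an assumed sketch of that clamping, following the vulkan.hpp convention:

    // Assumed sketch: Flags::operator~ XORs against FlagTraits::allFlags so
    // the complement never contains bits undefined for the flag type.
    constexpr Flags<FormatFeatureFlagBits> operator~() const
    {
      return Flags<FormatFeatureFlagBits>(
        m_mask ^ static_cast<VkFormatFeatureFlags>( FlagTraits<FormatFeatureFlagBits>::allFlags ) );
    }
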
@@ -8940,16 +10060,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( FormatFeatureFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FormatFeatureFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FormatFeatureFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
{
if ( !value ) return "{}";
@@ -8977,33 +10087,19 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | ";
if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | ";
+ if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | ";
if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | ";
- if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT ) result += "SampledImageFilterMinmaxEXT | ";
if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
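
Usage sketch for this mask: query a format's capabilities before relying on linear filtering (assuming the standard `vk::PhysicalDevice::getFormatProperties` entry point; `physicalDevice` is a placeholder handle):

    // Check that linear filtering of sampled images is supported before
    // generating mipmaps with VK_FILTER_LINEAR for this format.
    vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eR8G8B8A8Unorm );
    if ( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eSampledImageFilterLinear )
    {
      // safe to blit/sample with linear filtering in optimal tiling
    }
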
- enum class FramebufferCreateFlagBits
- {
- eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
- {
- switch ( value )
- {
- case FramebufferCreateFlagBits::eImagelessKHR : return "ImagelessKHR";
- default: return "invalid";
- }
- }
-
using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
template <> struct FlagTraits<FramebufferCreateFlagBits>
{
enum
{
- allFlags = VkFlags(FramebufferCreateFlagBits::eImagelessKHR)
+ allFlags = VkFlags(FramebufferCreateFlagBits::eImageless)
};
};
@@ -9027,41 +10123,15 @@ namespace VULKAN_HPP_NAMESPACE
return ~( FramebufferCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FramebufferCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return FramebufferCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value )
{
if ( !value ) return "{}";
std::string result;
- if ( value & FramebufferCreateFlagBits::eImagelessKHR ) result += "ImagelessKHR | ";
+ if ( value & FramebufferCreateFlagBits::eImageless ) result += "Imageless | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
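
The `KHR` suffix disappears here because VK_KHR_imageless_framebuffer was promoted to Vulkan 1.2 core. A hedged usage sketch with the core spelling (`fbInfo` is a placeholder; attachment images are then supplied at render-pass begin time through `vk::RenderPassAttachmentBeginInfo` rather than at framebuffer creation):

    vk::FramebufferCreateInfo fbInfo;
    fbInfo.flags = vk::FramebufferCreateFlagBits::eImageless;  // core spelling; eImagelessKHR was the extension name
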
- enum class GeometryFlagBitsNV
- {
- eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV,
- eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsNV value )
- {
- switch ( value )
- {
- case GeometryFlagBitsNV::eOpaque : return "Opaque";
- case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
- default: return "invalid";
- }
- }
-
using GeometryFlagsNV = Flags<GeometryFlagBitsNV, VkGeometryFlagsNV>;
template <> struct FlagTraits<GeometryFlagBitsNV>
@@ -9092,16 +10162,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( GeometryFlagsNV( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryFlagsNV( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryFlagsNV( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( GeometryFlagsNV value )
{
if ( !value ) return "{}";
@@ -9112,26 +10172,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class GeometryInstanceFlagBitsNV
- {
- eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
- eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV,
- eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV,
- eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsNV value )
- {
- switch ( value )
- {
- case GeometryInstanceFlagBitsNV::eTriangleCullDisable : return "TriangleCullDisable";
- case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise";
- case GeometryInstanceFlagBitsNV::eForceOpaque : return "ForceOpaque";
- case GeometryInstanceFlagBitsNV::eForceNoOpaque : return "ForceNoOpaque";
- default: return "invalid";
- }
- }
-
using GeometryInstanceFlagsNV = Flags<GeometryInstanceFlagBitsNV, VkGeometryInstanceFlagsNV>;
template <> struct FlagTraits<GeometryInstanceFlagBitsNV>
@@ -9162,16 +10202,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( GeometryInstanceFlagsNV( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryInstanceFlagsNV( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return GeometryInstanceFlagsNV( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsNV value )
{
if ( !value ) return "{}";
@@ -9216,43 +10246,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_IOS_MVK*/
- enum class ImageAspectFlagBits
- {
- eColor = VK_IMAGE_ASPECT_COLOR_BIT,
- eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
- eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
- eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
- ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
- ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
- ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
- eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
- eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
- eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
- eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
- ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
- ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
- ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
- {
- switch ( value )
- {
- case ImageAspectFlagBits::eColor : return "Color";
- case ImageAspectFlagBits::eDepth : return "Depth";
- case ImageAspectFlagBits::eStencil : return "Stencil";
- case ImageAspectFlagBits::eMetadata : return "Metadata";
- case ImageAspectFlagBits::ePlane0 : return "Plane0";
- case ImageAspectFlagBits::ePlane1 : return "Plane1";
- case ImageAspectFlagBits::ePlane2 : return "Plane2";
- case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT";
- case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT";
- case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT";
- case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT";
- default: return "invalid";
- }
- }
-
using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
template <> struct FlagTraits<ImageAspectFlagBits>
@@ -9283,16 +10276,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ImageAspectFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageAspectFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageAspectFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
{
if ( !value ) return "{}";
@@ -9312,54 +10295,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
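
Usage sketch combining aspect bits, e.g. addressing both planes of a combined depth/stencil image in one subresource range (the numeric values are illustrative):

    vk::ImageSubresourceRange range(
      vk::ImageAspectFlagBits::eDepth | vk::ImageAspectFlagBits::eStencil,
      0 /*baseMipLevel*/, 1 /*levelCount*/, 0 /*baseArrayLayer*/, 1 /*layerCount*/ );
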
- enum class ImageCreateFlagBits
- {
- eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
- eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
- eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
- eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
- eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
- eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
- eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
- e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
- eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
- eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
- eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
- eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
- eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
- eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
- eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
- eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
- e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
- eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
- eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
- eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
- eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
- {
- switch ( value )
- {
- case ImageCreateFlagBits::eSparseBinding : return "SparseBinding";
- case ImageCreateFlagBits::eSparseResidency : return "SparseResidency";
- case ImageCreateFlagBits::eSparseAliased : return "SparseAliased";
- case ImageCreateFlagBits::eMutableFormat : return "MutableFormat";
- case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible";
- case ImageCreateFlagBits::eAlias : return "Alias";
- case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
- case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible";
- case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible";
- case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage";
- case ImageCreateFlagBits::eProtected : return "Protected";
- case ImageCreateFlagBits::eDisjoint : return "Disjoint";
- case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV";
- case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT";
- case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
- default: return "invalid";
- }
- }
-
using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
template <> struct FlagTraits<ImageCreateFlagBits>
@@ -9390,16 +10325,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ImageCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
{
if ( !value ) return "{}";
@@ -9440,38 +10365,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_FUCHSIA*/
- enum class ImageUsageFlagBits
- {
- eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
- eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
- eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
- eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
- eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
- eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
- eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
- eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
- eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
- {
- switch ( value )
- {
- case ImageUsageFlagBits::eTransferSrc : return "TransferSrc";
- case ImageUsageFlagBits::eTransferDst : return "TransferDst";
- case ImageUsageFlagBits::eSampled : return "Sampled";
- case ImageUsageFlagBits::eStorage : return "Storage";
- case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment";
- case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
- case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment";
- case ImageUsageFlagBits::eInputAttachment : return "InputAttachment";
- case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
- case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
- default: return "invalid";
- }
- }
-
using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
template <> struct FlagTraits<ImageUsageFlagBits>
@@ -9502,16 +10395,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ImageUsageFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageUsageFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageUsageFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
{
if ( !value ) return "{}";
@@ -9530,20 +10413,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
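
Usage sketch: a typical mask for a texture that is written once by a copy and then only sampled:

    vk::ImageUsageFlags usage =
      vk::ImageUsageFlagBits::eTransferDst | vk::ImageUsageFlagBits::eSampled;
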
- enum class ImageViewCreateFlagBits
- {
- eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
- {
- switch ( value )
- {
- case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT";
- default: return "invalid";
- }
- }
-
using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
template <> struct FlagTraits<ImageViewCreateFlagBits>
@@ -9574,16 +10443,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ImageViewCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageViewCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ImageViewCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value )
{
if ( !value ) return "{}";
@@ -9593,26 +10452,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class IndirectCommandsLayoutUsageFlagBitsNVX
- {
- eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
- eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
- eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
- eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
- };
-
- VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNVX value )
- {
- switch ( value )
- {
- case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences : return "UnorderedSequences";
- case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences : return "SparseSequences";
- case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions : return "EmptyExecutions";
- case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences : return "IndexedSequences";
- default: return "invalid";
- }
- }
-
using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
@@ -9643,16 +10482,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNVX value )
{
if ( !value ) return "{}";
@@ -9665,14 +10494,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class InstanceCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits )
- {
- return "(void)";
- }
-
using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags )
@@ -9697,28 +10518,13 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
- enum class MemoryAllocateFlagBits
- {
- eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
- eDeviceMaskKHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
- {
- switch ( value )
- {
- case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask";
- default: return "invalid";
- }
- }
-
using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits, VkMemoryAllocateFlags>;
template <> struct FlagTraits<MemoryAllocateFlagBits>
{
enum
{
- allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask)
+ allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) | VkFlags(MemoryAllocateFlagBits::eDeviceAddress) | VkFlags(MemoryAllocateFlagBits::eDeviceAddressCaptureReplay)
};
};
@@ -9742,16 +10548,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( MemoryAllocateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryAllocateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryAllocateFlags( bit0 ) != bit1;
- }
-
using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value )
@@ -9760,26 +10556,11 @@ namespace VULKAN_HPP_NAMESPACE
std::string result;
if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | ";
+ if ( value & MemoryAllocateFlagBits::eDeviceAddress ) result += "DeviceAddress | ";
+ if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
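
The two new bits arrive with the promotion of buffer device address support into core Vulkan 1.2. A hedged sketch of how an allocation opts in (`device`, `allocationSize`, and `memoryTypeIndex` are placeholders; `vk::MemoryAllocateFlagsInfo` is the usual chaining struct):

    // Allocation whose memory may back buffers queried via vkGetBufferDeviceAddress.
    vk::MemoryAllocateFlagsInfo flagsInfo;
    flagsInfo.flags = vk::MemoryAllocateFlagBits::eDeviceAddress;
    vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
    allocInfo.pNext = &flagsInfo;
    vk::DeviceMemory memory = device.allocateMemory( allocInfo );
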
- enum class MemoryHeapFlagBits
- {
- eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
- eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
- eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
- {
- switch ( value )
- {
- case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal";
- case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance";
- default: return "invalid";
- }
- }
-
using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
template <> struct FlagTraits<MemoryHeapFlagBits>
@@ -9810,16 +10591,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( MemoryHeapFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryHeapFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryHeapFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value )
{
if ( !value ) return "{}";
@@ -9845,34 +10616,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class MemoryPropertyFlagBits
- {
- eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
- eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
- eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
- eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
- eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
- eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
- eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
- eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD
- };
-
- VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
- {
- switch ( value )
- {
- case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal";
- case MemoryPropertyFlagBits::eHostVisible : return "HostVisible";
- case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent";
- case MemoryPropertyFlagBits::eHostCached : return "HostCached";
- case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated";
- case MemoryPropertyFlagBits::eProtected : return "Protected";
- case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD";
- case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD";
- default: return "invalid";
- }
- }
-
using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
template <> struct FlagTraits<MemoryPropertyFlagBits>
@@ -9903,16 +10646,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( MemoryPropertyFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryPropertyFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return MemoryPropertyFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value )
{
if ( !value ) return "{}";
@@ -9946,22 +10679,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_METAL_EXT*/
- enum class ObjectEntryUsageFlagBitsNVX
- {
- eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
- eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
- };
-
- VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagBitsNVX value )
- {
- switch ( value )
- {
- case ObjectEntryUsageFlagBitsNVX::eGraphics : return "Graphics";
- case ObjectEntryUsageFlagBitsNVX::eCompute : return "Compute";
- default: return "invalid";
- }
- }
-
using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
@@ -9992,16 +10709,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ObjectEntryUsageFlagsNVX( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ObjectEntryUsageFlagsNVX( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ObjectEntryUsageFlagsNVX( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagsNVX value )
{
if ( !value ) return "{}";
@@ -10012,30 +10719,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class PeerMemoryFeatureFlagBits
- {
- eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
- eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
- eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
- eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT,
- eCopySrcKHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR,
- eCopyDstKHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR,
- eGenericSrcKHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR,
- eGenericDstKHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
- {
- switch ( value )
- {
- case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc";
- case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst";
- case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc";
- case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst";
- default: return "invalid";
- }
- }
-
using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits, VkPeerMemoryFeatureFlags>;
template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
@@ -10066,16 +10749,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( PeerMemoryFeatureFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PeerMemoryFeatureFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PeerMemoryFeatureFlags( bit0 ) != bit1;
- }
-
using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value )
@@ -10090,42 +10763,58 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class PipelineCacheCreateFlagBits
- {};
+ using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR, VkPerformanceCounterDescriptionFlagsKHR>;
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits )
+ template <> struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
{
- return "(void)";
+ enum
+ {
+ allFlags = VkFlags(PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting) | VkFlags(PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1;
}
- using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1;
+ }
- VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags )
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
- return "{}";
+ return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1;
}
- enum class PipelineColorBlendStateCreateFlagBits
- {};
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( PerformanceCounterDescriptionFlagsKHR( bits ) );
+ }
- VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits )
+ VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value )
{
- return "(void)";
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) result += "PerformanceImpacting | ";
+ if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) result += "ConcurrentlyImpacted | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
}
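
This block is new with VK_KHR_performance_query. A sketch of filtering enumerated counters (`desc` is a hypothetical `vk::PerformanceCounterDescriptionKHR` obtained from counter enumeration):

    // Sketch: keep only counters that do not perturb the measurement.
    bool usable( vk::PerformanceCounterDescriptionKHR const & desc )
    {
      return !( desc.flags & vk::PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting );
    }
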
- using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
+ using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
- VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags )
+ VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags )
{
return "{}";
}
- enum class PipelineCompilerControlFlagBitsAMD
- {};
+ using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
- VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
+ VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags )
{
- return "(void)";
+ return "{}";
}
using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD, VkPipelineCompilerControlFlagsAMD>;
@@ -10180,36 +10869,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineCreateFlagBits
- {
- eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
- eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
- eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
- eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
- eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE,
- eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
- eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
- eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
- eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
- eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
- {
- switch ( value )
- {
- case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization";
- case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives";
- case PipelineCreateFlagBits::eDerivative : return "Derivative";
- case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex";
- case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase";
- case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV";
- case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR";
- case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR";
- default: return "invalid";
- }
- }
-
using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
template <> struct FlagTraits<PipelineCreateFlagBits>
@@ -10240,16 +10899,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( PipelineCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value )
{
if ( !value ) return "{}";
@@ -10266,24 +10915,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
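
Usage sketch for the derivative bits (a hypothetical parent/child pair; `parentPipeline` is an assumed existing `vk::Pipeline`):

    vk::GraphicsPipelineCreateInfo parentInfo;
    parentInfo.flags = vk::PipelineCreateFlagBits::eAllowDerivatives;

    vk::GraphicsPipelineCreateInfo childInfo;
    childInfo.flags              = vk::PipelineCreateFlagBits::eDerivative;
    childInfo.basePipelineHandle = parentPipeline;  // child may compile and bind faster than from scratch
    childInfo.basePipelineIndex  = -1;
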
- enum class PipelineCreationFeedbackFlagBitsEXT
- {
- eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT,
- eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT,
- eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value )
- {
- switch ( value )
- {
- case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid";
- case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit";
- case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration";
- default: return "invalid";
- }
- }
-
using PipelineCreationFeedbackFlagsEXT = Flags<PipelineCreationFeedbackFlagBitsEXT, VkPipelineCreationFeedbackFlagsEXT>;
template <> struct FlagTraits<PipelineCreationFeedbackFlagBitsEXT>
@@ -10314,16 +10945,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( PipelineCreationFeedbackFlagsEXT( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreationFeedbackFlagsEXT( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineCreationFeedbackFlagsEXT( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value )
{
if ( !value ) return "{}";
@@ -10335,14 +10956,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class PipelineDepthStencilStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags )
@@ -10365,14 +10978,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineDynamicStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags )
@@ -10380,14 +10985,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineInputAssemblyStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags )
@@ -10395,14 +10992,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineLayoutCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags )
@@ -10410,14 +10999,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineMultisampleStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags )
@@ -10455,14 +11036,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineRasterizationStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags )
@@ -10485,22 +11058,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineShaderStageCreateFlagBits
- {
- eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
- eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
- {
- switch ( value )
- {
- case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT";
- case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT";
- default: return "invalid";
- }
- }
-
using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
template <> struct FlagTraits<PipelineShaderStageCreateFlagBits>
@@ -10531,16 +11088,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( PipelineShaderStageCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineShaderStageCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineShaderStageCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value )
{
if ( !value ) return "{}";
@@ -10551,70 +11098,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class PipelineStageFlagBits
- {
- eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
- eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
- eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
- eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
- eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
- eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
- eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
- eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
- eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
- eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
- eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
- eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
- eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
- eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
- eHost = VK_PIPELINE_STAGE_HOST_BIT,
- eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
- eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
- eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
- eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
- eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX,
- eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
- eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
- eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
- eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
- eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
- eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
- {
- switch ( value )
- {
- case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe";
- case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect";
- case PipelineStageFlagBits::eVertexInput : return "VertexInput";
- case PipelineStageFlagBits::eVertexShader : return "VertexShader";
- case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader";
- case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader";
- case PipelineStageFlagBits::eGeometryShader : return "GeometryShader";
- case PipelineStageFlagBits::eFragmentShader : return "FragmentShader";
- case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests";
- case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests";
- case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput";
- case PipelineStageFlagBits::eComputeShader : return "ComputeShader";
- case PipelineStageFlagBits::eTransfer : return "Transfer";
- case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe";
- case PipelineStageFlagBits::eHost : return "Host";
- case PipelineStageFlagBits::eAllGraphics : return "AllGraphics";
- case PipelineStageFlagBits::eAllCommands : return "AllCommands";
- case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT";
- case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
- case PipelineStageFlagBits::eCommandProcessNVX : return "CommandProcessNVX";
- case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
- case PipelineStageFlagBits::eRayTracingShaderNV : return "RayTracingShaderNV";
- case PipelineStageFlagBits::eAccelerationStructureBuildNV : return "AccelerationStructureBuildNV";
- case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV";
- case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
- case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
- default: return "invalid";
- }
- }
-
using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
template <> struct FlagTraits<PipelineStageFlagBits>
@@ -10645,16 +11128,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( PipelineStageFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineStageFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return PipelineStageFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value )
{
if ( !value ) return "{}";
@@ -10689,14 +11162,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
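
Usage sketch for stage masks in a barrier: make a transfer write visible to fragment-shader reads (`cmd` and `imageBarrier` are placeholder objects; the `pipelineBarrier` signature is the standard vulkan.hpp one):

    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,        // producing stage
                         vk::PipelineStageFlagBits::eFragmentShader,  // consuming stage
                         vk::DependencyFlags(),
                         nullptr, nullptr, imageBarrier );
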
- enum class PipelineTessellationStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags )
@@ -10704,14 +11169,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineVertexInputStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags )
@@ -10719,14 +11176,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class PipelineViewportStateCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
- {
- return "(void)";
- }
-
using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags )
@@ -10749,20 +11198,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class QueryControlFlagBits
- {
- ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
- {
- switch ( value )
- {
- case QueryControlFlagBits::ePrecise : return "Precise";
- default: return "invalid";
- }
- }
-
using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
template <> struct FlagTraits<QueryControlFlagBits>
@@ -10793,16 +11228,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( QueryControlFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryControlFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryControlFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value )
{
if ( !value ) return "{}";
@@ -10812,40 +11237,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class QueryPipelineStatisticFlagBits
- {
- eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
- eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
- eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
- eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
- eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
- eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
- eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
- eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
- eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
- eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
- eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
- {
- switch ( value )
- {
- case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices";
- case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives";
- case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations";
- case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations";
- case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives";
- case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations";
- case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives";
- case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations";
- case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches";
- case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations";
- case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations";
- default: return "invalid";
- }
- }
-
using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
@@ -10876,16 +11267,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( QueryPipelineStatisticFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryPipelineStatisticFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryPipelineStatisticFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value )
{
if ( !value ) return "{}";
@@ -10905,14 +11286,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class QueryPoolCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
- {
- return "(void)";
- }
-
using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags )
@@ -10920,26 +11293,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class QueryResultFlagBits
- {
- e64 = VK_QUERY_RESULT_64_BIT,
- eWait = VK_QUERY_RESULT_WAIT_BIT,
- eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
- ePartial = VK_QUERY_RESULT_PARTIAL_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
- {
- switch ( value )
- {
- case QueryResultFlagBits::e64 : return "64";
- case QueryResultFlagBits::eWait : return "Wait";
- case QueryResultFlagBits::eWithAvailability : return "WithAvailability";
- case QueryResultFlagBits::ePartial : return "Partial";
- default: return "invalid";
- }
- }
-
using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
template <> struct FlagTraits<QueryResultFlagBits>
@@ -10970,16 +11323,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( QueryResultFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryResultFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueryResultFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
{
if ( !value ) return "{}";
@@ -10992,28 +11335,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class QueueFlagBits
- {
- eGraphics = VK_QUEUE_GRAPHICS_BIT,
- eCompute = VK_QUEUE_COMPUTE_BIT,
- eTransfer = VK_QUEUE_TRANSFER_BIT,
- eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
- eProtected = VK_QUEUE_PROTECTED_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
- {
- switch ( value )
- {
- case QueueFlagBits::eGraphics : return "Graphics";
- case QueueFlagBits::eCompute : return "Compute";
- case QueueFlagBits::eTransfer : return "Transfer";
- case QueueFlagBits::eSparseBinding : return "SparseBinding";
- case QueueFlagBits::eProtected : return "Protected";
- default: return "invalid";
- }
- }
-
using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
template <> struct FlagTraits<QueueFlagBits>
@@ -11044,16 +11365,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( QueueFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueueFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return QueueFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( QueueFlags value )
{
if ( !value ) return "{}";
@@ -11067,14 +11378,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
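// Editorial sketch (not part of the diff): selecting a queue family with the
// QueueFlags mask from this section; `physicalDevice` is assumed valid.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>
uint32_t findGraphicsQueueFamily( vk::PhysicalDevice physicalDevice )
{
  std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
    if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
      return i;
  return UINT32_MAX;  // no graphics-capable family found
}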
- enum class RenderPassCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits )
- {
- return "(void)";
- }
-
using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags )
@@ -11082,106 +11385,50 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class ResolveModeFlagBitsKHR
- {
- eNone = VK_RESOLVE_MODE_NONE_KHR,
- eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR,
- eAverage = VK_RESOLVE_MODE_AVERAGE_BIT_KHR,
- eMin = VK_RESOLVE_MODE_MIN_BIT_KHR,
- eMax = VK_RESOLVE_MODE_MAX_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBitsKHR value )
- {
- switch ( value )
- {
- case ResolveModeFlagBitsKHR::eNone : return "None";
- case ResolveModeFlagBitsKHR::eSampleZero : return "SampleZero";
- case ResolveModeFlagBitsKHR::eAverage : return "Average";
- case ResolveModeFlagBitsKHR::eMin : return "Min";
- case ResolveModeFlagBitsKHR::eMax : return "Max";
- default: return "invalid";
- }
- }
-
- using ResolveModeFlagsKHR = Flags<ResolveModeFlagBitsKHR, VkResolveModeFlagsKHR>;
+ using ResolveModeFlags = Flags<ResolveModeFlagBits, VkResolveModeFlags>;
- template <> struct FlagTraits<ResolveModeFlagBitsKHR>
+ template <> struct FlagTraits<ResolveModeFlagBits>
{
enum
{
- allFlags = VkFlags(ResolveModeFlagBitsKHR::eNone) | VkFlags(ResolveModeFlagBitsKHR::eSampleZero) | VkFlags(ResolveModeFlagBitsKHR::eAverage) | VkFlags(ResolveModeFlagBitsKHR::eMin) | VkFlags(ResolveModeFlagBitsKHR::eMax)
+ allFlags = VkFlags(ResolveModeFlagBits::eNone) | VkFlags(ResolveModeFlagBits::eSampleZero) | VkFlags(ResolveModeFlagBits::eAverage) | VkFlags(ResolveModeFlagBits::eMin) | VkFlags(ResolveModeFlagBits::eMax)
};
};
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator|( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return ResolveModeFlagsKHR( bit0 ) | bit1;
+ return ResolveModeFlags( bit0 ) | bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator&( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return ResolveModeFlagsKHR( bit0 ) & bit1;
+ return ResolveModeFlags( bit0 ) & bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator^( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return ResolveModeFlagsKHR( bit0 ) ^ bit1;
+ return ResolveModeFlags( bit0 ) ^ bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator~( ResolveModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator~( ResolveModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
- return ~( ResolveModeFlagsKHR( bits ) );
+ return ~( ResolveModeFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ResolveModeFlagsKHR( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ResolveModeFlagsKHR( bit0 ) != bit1;
- }
+ using ResolveModeFlagsKHR = ResolveModeFlags;
- VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagsKHR value )
+ VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value )
{
if ( !value ) return "{}";
std::string result;
- if ( value & ResolveModeFlagBitsKHR::eSampleZero ) result += "SampleZero | ";
- if ( value & ResolveModeFlagBitsKHR::eAverage ) result += "Average | ";
- if ( value & ResolveModeFlagBitsKHR::eMin ) result += "Min | ";
- if ( value & ResolveModeFlagBitsKHR::eMax ) result += "Max | ";
+ if ( value & ResolveModeFlagBits::eSampleZero ) result += "SampleZero | ";
+ if ( value & ResolveModeFlagBits::eAverage ) result += "Average | ";
+ if ( value & ResolveModeFlagBits::eMin ) result += "Min | ";
+ if ( value & ResolveModeFlagBits::eMax ) result += "Max | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
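// Editorial sketch (not part of the diff): the hunk above shows the
// KHR-to-core promotion pattern used throughout this update; the KHR spelling
// survives as an alias, so pre-Vulkan-1.2 code keeps compiling. (The matching
// `using ResolveModeFlagBitsKHR = ResolveModeFlagBits;` alias is assumed to
// live in the enum section of the header, outside this hunk.)
#include <type_traits>
#include <vulkan/vulkan.hpp>
static_assert( std::is_same<vk::ResolveModeFlagsKHR, vk::ResolveModeFlags>::value,
               "the KHR name now refers to the core Vulkan 1.2 type" );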
- enum class SampleCountFlagBits
- {
- e1 = VK_SAMPLE_COUNT_1_BIT,
- e2 = VK_SAMPLE_COUNT_2_BIT,
- e4 = VK_SAMPLE_COUNT_4_BIT,
- e8 = VK_SAMPLE_COUNT_8_BIT,
- e16 = VK_SAMPLE_COUNT_16_BIT,
- e32 = VK_SAMPLE_COUNT_32_BIT,
- e64 = VK_SAMPLE_COUNT_64_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
- {
- switch ( value )
- {
- case SampleCountFlagBits::e1 : return "1";
- case SampleCountFlagBits::e2 : return "2";
- case SampleCountFlagBits::e4 : return "4";
- case SampleCountFlagBits::e8 : return "8";
- case SampleCountFlagBits::e16 : return "16";
- case SampleCountFlagBits::e32 : return "32";
- case SampleCountFlagBits::e64 : return "64";
- default: return "invalid";
- }
- }
-
using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
template <> struct FlagTraits<SampleCountFlagBits>
@@ -11212,16 +11459,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SampleCountFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SampleCountFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SampleCountFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value )
{
if ( !value ) return "{}";
@@ -11237,22 +11474,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
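// Editorial sketch (not part of the diff): picking the highest sample count
// out of a SampleCountFlags mask, e.g. one derived from the device limits.
#include <vulkan/vulkan.hpp>
vk::SampleCountFlagBits maxUsableSamples( vk::SampleCountFlags supported )
{
  for ( vk::SampleCountFlagBits bit : { vk::SampleCountFlagBits::e64, vk::SampleCountFlagBits::e32,
                                        vk::SampleCountFlagBits::e16, vk::SampleCountFlagBits::e8,
                                        vk::SampleCountFlagBits::e4,  vk::SampleCountFlagBits::e2 } )
    if ( supported & bit )
      return bit;
  return vk::SampleCountFlagBits::e1;
}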
- enum class SamplerCreateFlagBits
- {
- eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
- eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
- {
- switch ( value )
- {
- case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
- case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT";
- default: return "invalid";
- }
- }
-
using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
template <> struct FlagTraits<SamplerCreateFlagBits>
@@ -11283,16 +11504,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SamplerCreateFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SamplerCreateFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SamplerCreateFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value )
{
if ( !value ) return "{}";
@@ -11303,14 +11514,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class SemaphoreCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
- {
- return "(void)";
- }
-
using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
@@ -11318,21 +11521,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class SemaphoreImportFlagBits
- {
- eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
- eTemporaryKHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
- {
- switch ( value )
- {
- case SemaphoreImportFlagBits::eTemporary : return "Temporary";
- default: return "invalid";
- }
- }
-
using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits, VkSemaphoreImportFlags>;
template <> struct FlagTraits<SemaphoreImportFlagBits>
@@ -11363,16 +11551,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SemaphoreImportFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreImportFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreImportFlags( bit0 ) != bit1;
- }
-
using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
@@ -11384,77 +11562,47 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class SemaphoreWaitFlagBitsKHR
- {
- eAny = VK_SEMAPHORE_WAIT_ANY_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBitsKHR value )
- {
- switch ( value )
- {
- case SemaphoreWaitFlagBitsKHR::eAny : return "Any";
- default: return "invalid";
- }
- }
-
- using SemaphoreWaitFlagsKHR = Flags<SemaphoreWaitFlagBitsKHR, VkSemaphoreWaitFlagsKHR>;
+ using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits, VkSemaphoreWaitFlags>;
- template <> struct FlagTraits<SemaphoreWaitFlagBitsKHR>
+ template <> struct FlagTraits<SemaphoreWaitFlagBits>
{
enum
{
- allFlags = VkFlags(SemaphoreWaitFlagBitsKHR::eAny)
+ allFlags = VkFlags(SemaphoreWaitFlagBits::eAny)
};
};
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlagsKHR operator|( SemaphoreWaitFlagBitsKHR bit0, SemaphoreWaitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreWaitFlagsKHR( bit0 ) | bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlagsKHR operator&( SemaphoreWaitFlagBitsKHR bit0, SemaphoreWaitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return SemaphoreWaitFlagsKHR( bit0 ) & bit1;
+ return SemaphoreWaitFlags( bit0 ) | bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlagsKHR operator^( SemaphoreWaitFlagBitsKHR bit0, SemaphoreWaitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return SemaphoreWaitFlagsKHR( bit0 ) ^ bit1;
+ return SemaphoreWaitFlags( bit0 ) & bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlagsKHR operator~( SemaphoreWaitFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
- return ~( SemaphoreWaitFlagsKHR( bits ) );
+ return SemaphoreWaitFlags( bit0 ) ^ bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SemaphoreWaitFlagBitsKHR bit0, SemaphoreWaitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator~( SemaphoreWaitFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
- return SemaphoreWaitFlagsKHR( bit0 ) == bit1;
+ return ~( SemaphoreWaitFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SemaphoreWaitFlagBitsKHR bit0, SemaphoreWaitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SemaphoreWaitFlagsKHR( bit0 ) != bit1;
- }
+ using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags;
- VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagsKHR value )
+ VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value )
{
if ( !value ) return "{}";
std::string result;
- if ( value & SemaphoreWaitFlagBitsKHR::eAny ) result += "Any | ";
+ if ( value & SemaphoreWaitFlagBits::eAny ) result += "Any | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
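// Editorial sketch (not part of the diff): waiting on a timeline semaphore
// with the promoted core types from this hunk; assumes `device` and
// `timeline` are valid and the enhanced (exception-throwing) API is in use.
#include <cstdint>
#include <vulkan/vulkan.hpp>
void waitForTimelineValue( vk::Device device, vk::Semaphore timeline, uint64_t value )
{
  vk::SemaphoreWaitInfo waitInfo;
  waitInfo.flags          = vk::SemaphoreWaitFlagBits::eAny;  // wait for any listed semaphore
  waitInfo.semaphoreCount = 1;
  waitInfo.pSemaphores    = &timeline;
  waitInfo.pValues        = &value;
  (void)device.waitSemaphores( waitInfo, UINT64_MAX );  // returns eSuccess or eTimeout
}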
- enum class ShaderCorePropertiesFlagBitsAMD
- {};
-
- VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
- {
- return "(void)";
- }
-
using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD, VkShaderCorePropertiesFlagsAMD>;
VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD )
@@ -11462,14 +11610,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class ShaderModuleCreateFlagBits
- {};
-
- VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
- {
- return "(void)";
- }
-
using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags )
@@ -11477,50 +11617,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
- enum class ShaderStageFlagBits
- {
- eVertex = VK_SHADER_STAGE_VERTEX_BIT,
- eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
- eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
- eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
- eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
- eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
- eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
- eAll = VK_SHADER_STAGE_ALL,
- eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
- eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
- eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
- eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
- eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
- eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
- eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
- eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
- {
- switch ( value )
- {
- case ShaderStageFlagBits::eVertex : return "Vertex";
- case ShaderStageFlagBits::eTessellationControl : return "TessellationControl";
- case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation";
- case ShaderStageFlagBits::eGeometry : return "Geometry";
- case ShaderStageFlagBits::eFragment : return "Fragment";
- case ShaderStageFlagBits::eCompute : return "Compute";
- case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
- case ShaderStageFlagBits::eAll : return "All";
- case ShaderStageFlagBits::eRaygenNV : return "RaygenNV";
- case ShaderStageFlagBits::eAnyHitNV : return "AnyHitNV";
- case ShaderStageFlagBits::eClosestHitNV : return "ClosestHitNV";
- case ShaderStageFlagBits::eMissNV : return "MissNV";
- case ShaderStageFlagBits::eIntersectionNV : return "IntersectionNV";
- case ShaderStageFlagBits::eCallableNV : return "CallableNV";
- case ShaderStageFlagBits::eTaskNV : return "TaskNV";
- case ShaderStageFlagBits::eMeshNV : return "MeshNV";
- default: return "invalid";
- }
- }
-
using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
template <> struct FlagTraits<ShaderStageFlagBits>
@@ -11551,16 +11647,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( ShaderStageFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ShaderStageFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return ShaderStageFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value )
{
if ( !value ) return "{}";
@@ -11583,24 +11669,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
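// Editorial sketch (not part of the diff): ShaderStageFlags composed for a
// push-constant range visible to two stages, using the bits listed above.
#include <vulkan/vulkan.hpp>
vk::PushConstantRange makePushConstantRange()
{
  vk::PushConstantRange range;
  range.stageFlags = vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment;
  range.offset     = 0;
  range.size       = 64;  // bytes
  return range;
}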
- enum class SparseImageFormatFlagBits
- {
- eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
- eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
- eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
- {
- switch ( value )
- {
- case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail";
- case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize";
- case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize";
- default: return "invalid";
- }
- }
-
using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
template <> struct FlagTraits<SparseImageFormatFlagBits>
@@ -11631,16 +11699,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SparseImageFormatFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseImageFormatFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseImageFormatFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value )
{
if ( !value ) return "{}";
@@ -11652,20 +11710,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class SparseMemoryBindFlagBits
- {
- eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
- };
-
- VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
- {
- switch ( value )
- {
- case SparseMemoryBindFlagBits::eMetadata : return "Metadata";
- default: return "invalid";
- }
- }
-
using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
template <> struct FlagTraits<SparseMemoryBindFlagBits>
@@ -11696,16 +11740,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SparseMemoryBindFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseMemoryBindFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SparseMemoryBindFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value )
{
if ( !value ) return "{}";
@@ -11715,25 +11749,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class StencilFaceFlagBits
- {
- eFront = VK_STENCIL_FACE_FRONT_BIT,
- eBack = VK_STENCIL_FACE_BACK_BIT,
- eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
- eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
- };
-
- VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
- {
- switch ( value )
- {
- case StencilFaceFlagBits::eFront : return "Front";
- case StencilFaceFlagBits::eBack : return "Back";
- case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack";
- default: return "invalid";
- }
- }
-
using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
template <> struct FlagTraits<StencilFaceFlagBits>
@@ -11764,16 +11779,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( StencilFaceFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return StencilFaceFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return StencilFaceFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value )
{
if ( !value ) return "{}";
@@ -11801,36 +11806,6 @@ namespace VULKAN_HPP_NAMESPACE
}
#endif /*VK_USE_PLATFORM_GGP*/
- enum class SubgroupFeatureFlagBits
- {
- eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
- eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
- eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
- eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
- eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
- eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
- eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
- eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
- ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
- };
-
- VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
- {
- switch ( value )
- {
- case SubgroupFeatureFlagBits::eBasic : return "Basic";
- case SubgroupFeatureFlagBits::eVote : return "Vote";
- case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic";
- case SubgroupFeatureFlagBits::eBallot : return "Ballot";
- case SubgroupFeatureFlagBits::eShuffle : return "Shuffle";
- case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative";
- case SubgroupFeatureFlagBits::eClustered : return "Clustered";
- case SubgroupFeatureFlagBits::eQuad : return "Quad";
- case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV";
- default: return "invalid";
- }
- }
-
using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits, VkSubgroupFeatureFlags>;
template <> struct FlagTraits<SubgroupFeatureFlagBits>
@@ -11861,16 +11836,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SubgroupFeatureFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubgroupFeatureFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubgroupFeatureFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value )
{
if ( !value ) return "{}";
@@ -11888,22 +11853,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class SubpassDescriptionFlagBits
- {
- ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
- ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX
- };
-
- VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
- {
- switch ( value )
- {
- case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX";
- case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX";
- default: return "invalid";
- }
- }
-
using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
template <> struct FlagTraits<SubpassDescriptionFlagBits>
@@ -11934,16 +11883,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SubpassDescriptionFlags( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubpassDescriptionFlags( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SubpassDescriptionFlags( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value )
{
if ( !value ) return "{}";
@@ -11954,20 +11893,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class SurfaceCounterFlagBitsEXT
- {
- eVblank = VK_SURFACE_COUNTER_VBLANK_EXT
- };
-
- VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
- {
- switch ( value )
- {
- case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank";
- default: return "invalid";
- }
- }
-
using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
@@ -11998,16 +11923,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SurfaceCounterFlagsEXT( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceCounterFlagsEXT( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceCounterFlagsEXT( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value )
{
if ( !value ) return "{}";
@@ -12017,36 +11932,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class SurfaceTransformFlagBitsKHR
- {
- eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
- eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
- eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
- eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
- eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
- eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
- eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
- eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
- eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
- {
- switch ( value )
- {
- case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity";
- case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90";
- case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180";
- case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270";
- case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror";
- case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90";
- case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180";
- case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270";
- case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit";
- default: return "invalid";
- }
- }
-
using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
@@ -12077,16 +11962,6 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SurfaceTransformFlagsKHR( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceTransformFlagsKHR( bit0 ) == bit1;
- }
-
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
- {
- return SurfaceTransformFlagsKHR( bit0 ) != bit1;
- }
-
VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value )
{
if ( !value ) return "{}";
@@ -12104,24 +11979,6 @@ namespace VULKAN_HPP_NAMESPACE
return "{ " + result.substr(0, result.size() - 3) + " }";
}
- enum class SwapchainCreateFlagBitsKHR
- {
- eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
- eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
- eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR
- };
-
- VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
- {
- switch ( value )
- {
- case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
- case SwapchainCreateFlagBitsKHR::eProtected : return "Protected";
- case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat";
- default: return "invalid";
- }
- }
-
using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
@@ -12152,24 +12009,59 @@ namespace VULKAN_HPP_NAMESPACE
return ~( SwapchainCreateFlagsKHR( bits ) );
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator==( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
+ {
+ if ( !value ) return "{}";
+ std::string result;
+
+ if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
+ if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | ";
+ if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | ";
+ return "{ " + result.substr(0, result.size() - 3) + " }";
+ }
+
+ using ToolPurposeFlagsEXT = Flags<ToolPurposeFlagBitsEXT, VkToolPurposeFlagsEXT>;
+
+ template <> struct FlagTraits<ToolPurposeFlagBitsEXT>
{
- return SwapchainCreateFlagsKHR( bit0 ) == bit1;
+ enum
+ {
+ allFlags = VkFlags(ToolPurposeFlagBitsEXT::eValidation) | VkFlags(ToolPurposeFlagBitsEXT::eProfiling) | VkFlags(ToolPurposeFlagBitsEXT::eTracing) | VkFlags(ToolPurposeFlagBitsEXT::eAdditionalFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eModifyingFeatures) | VkFlags(ToolPurposeFlagBitsEXT::eDebugReporting) | VkFlags(ToolPurposeFlagBitsEXT::eDebugMarkers)
+ };
+ };
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return ToolPurposeFlagsEXT( bit0 ) | bit1;
}
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR bool operator!=( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
- return SwapchainCreateFlagsKHR( bit0 ) != bit1;
+ return ToolPurposeFlagsEXT( bit0 ) & bit1;
}
- VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+ {
+ return ToolPurposeFlagsEXT( bit0 ) ^ bit1;
+ }
+
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+ {
+ return ~( ToolPurposeFlagsEXT( bits ) );
+ }
+
+ VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value )
{
if ( !value ) return "{}";
std::string result;
- if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
- if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | ";
- if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | ";
+ if ( value & ToolPurposeFlagBitsEXT::eValidation ) result += "Validation | ";
+ if ( value & ToolPurposeFlagBitsEXT::eProfiling ) result += "Profiling | ";
+ if ( value & ToolPurposeFlagBitsEXT::eTracing ) result += "Tracing | ";
+ if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) result += "AdditionalFeatures | ";
+ if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) result += "ModifyingFeatures | ";
+ if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) result += "DebugReporting | ";
+ if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) result += "DebugMarkers | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
@@ -12274,12 +12166,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
} // namespace VULKAN_HPP_NAMESPACE
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
namespace std
{
template <>
struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
{};
}
+#endif
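// Editorial sketch (not part of the diff): the new guard above keeps the
// <system_error> integration out of exception-free builds; when it is
// compiled in, a vk::Result interoperates with std::error_code as usual.
// (make_error_code is assumed to sit under the same guard.)
#include <system_error>
#include <vulkan/vulkan.hpp>
std::error_code toErrorCode( vk::Result r )
{
  return vk::make_error_code( r );  // uses the Vulkan error_category
}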
namespace VULKAN_HPP_NAMESPACE
{
@@ -12455,6 +12349,15 @@ namespace VULKAN_HPP_NAMESPACE
: SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
};
+ class UnknownError : public SystemError
+ {
+ public:
+ UnknownError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+ UnknownError( char const * message )
+ : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+ };
+
class OutOfPoolMemoryError : public SystemError
{
public:
@@ -12473,6 +12376,24 @@ namespace VULKAN_HPP_NAMESPACE
: SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
};
+ class FragmentationError : public SystemError
+ {
+ public:
+ FragmentationError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+ FragmentationError( char const * message )
+ : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+ };
+
+ class InvalidOpaqueCaptureAddressError : public SystemError
+ {
+ public:
+ InvalidOpaqueCaptureAddressError( std::string const& message )
+ : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+ InvalidOpaqueCaptureAddressError( char const * message )
+ : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+ };
+
class SurfaceLostKHRError : public SystemError
{
public:
@@ -12536,15 +12457,6 @@ namespace VULKAN_HPP_NAMESPACE
: SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
};
- class FragmentationEXTError : public SystemError
- {
- public:
- FragmentationEXTError( std::string const& message )
- : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {}
- FragmentationEXTError( char const * message )
- : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {}
- };
-
class NotPermittedEXTError : public SystemError
{
public:
@@ -12554,15 +12466,6 @@ namespace VULKAN_HPP_NAMESPACE
: SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
};
- class InvalidDeviceAddressEXTError : public SystemError
- {
- public:
- InvalidDeviceAddressEXTError( std::string const& message )
- : SystemError( make_error_code( Result::eErrorInvalidDeviceAddressEXT ), message ) {}
- InvalidDeviceAddressEXTError( char const * message )
- : SystemError( make_error_code( Result::eErrorInvalidDeviceAddressEXT ), message ) {}
- };
-
class FullScreenExclusiveModeLostEXTError : public SystemError
{
public:
@@ -12572,7 +12475,7 @@ namespace VULKAN_HPP_NAMESPACE
: SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
};
- VULKAN_HPP_INLINE void throwResultException( Result result, char const * message )
+ [[noreturn]] static void throwResultException( Result result, char const * message )
{
switch ( result )
{
@@ -12588,8 +12491,11 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorTooManyObjects: throw TooManyObjectsError( message );
case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message );
case Result::eErrorFragmentedPool: throw FragmentedPoolError( message );
+ case Result::eErrorUnknown: throw UnknownError( message );
case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message );
case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message );
+ case Result::eErrorFragmentation: throw FragmentationError( message );
+ case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message );
case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message );
case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message );
case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message );
@@ -12597,9 +12503,7 @@ namespace VULKAN_HPP_NAMESPACE
case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
- case Result::eErrorFragmentationEXT: throw FragmentationEXTError( message );
case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message );
- case Result::eErrorInvalidDeviceAddressEXT: throw InvalidDeviceAddressEXTError( message );
case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
default: throw SystemError( make_error_code( result ) );
}
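// Editorial sketch (not part of the diff): with throwResultException now
// [[noreturn]], callers can rely on the typed exceptions, including the new
// Vulkan 1.2 ones added in this hunk. Assumes the enhanced, exception-throwing
// API with smart handles enabled.
#include <iostream>
#include <vulkan/vulkan.hpp>
vk::UniqueBuffer tryCreateBuffer( vk::Device device, vk::BufferCreateInfo const & info )
{
  try
  {
    return device.createBufferUnique( info );
  }
  catch ( vk::FragmentationError const & e )  // was FragmentationEXTError before this change
  {
    std::cerr << "fragmentation: " << e.what() << '\n';
    throw;
  }
}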
@@ -12736,6 +12640,7 @@ namespace VULKAN_HPP_NAMESPACE
struct AccelerationStructureInfoNV;
struct AccelerationStructureMemoryRequirementsInfoNV;
struct AcquireNextImageInfoKHR;
+ struct AcquireProfilingLockInfoKHR;
struct AllocationCallbacks;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
struct AndroidHardwareBufferFormatPropertiesANDROID;
@@ -12751,11 +12656,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
struct ApplicationInfo;
struct AttachmentDescription;
- struct AttachmentDescription2KHR;
- struct AttachmentDescriptionStencilLayoutKHR;
+ struct AttachmentDescription2;
+ using AttachmentDescription2KHR = AttachmentDescription2;
+ struct AttachmentDescriptionStencilLayout;
+ using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
struct AttachmentReference;
- struct AttachmentReference2KHR;
- struct AttachmentReferenceStencilLayoutKHR;
+ struct AttachmentReference2;
+ using AttachmentReference2KHR = AttachmentReference2;
+ struct AttachmentReferenceStencilLayout;
+ using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
struct AttachmentSampleLocationsEXT;
struct BaseInStructure;
struct BaseOutStructure;
@@ -12775,11 +12684,15 @@ namespace VULKAN_HPP_NAMESPACE
struct BufferCopy;
struct BufferCreateInfo;
struct BufferDeviceAddressCreateInfoEXT;
- struct BufferDeviceAddressInfoEXT;
+ struct BufferDeviceAddressInfo;
+ using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
+ using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
struct BufferImageCopy;
struct BufferMemoryBarrier;
struct BufferMemoryRequirementsInfo2;
using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+ struct BufferOpaqueCaptureAddressCreateInfo;
+ using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
struct BufferViewCreateInfo;
struct CalibratedTimestampInfoEXT;
struct CheckpointDataNV;
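// Editorial sketch (not part of the diff): the BufferDeviceAddressInfo
// promotion declared above keeps the EXT/KHR spellings as aliases; querying
// an address with the core 1.2 entry point might look like this, assuming
// `device`/`buffer` are valid and the bufferDeviceAddress feature is enabled.
#include <vulkan/vulkan.hpp>
vk::DeviceAddress addressOf( vk::Device device, vk::Buffer buffer )
{
  vk::BufferDeviceAddressInfo info;
  info.buffer = buffer;
  return device.getBufferAddress( info );
}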
@@ -12800,7 +12713,8 @@ namespace VULKAN_HPP_NAMESPACE
struct ComponentMapping;
struct ComputePipelineCreateInfo;
struct ConditionalRenderingBeginInfoEXT;
- struct ConformanceVersionKHR;
+ struct ConformanceVersion;
+ using ConformanceVersionKHR = ConformanceVersion;
struct CooperativeMatrixPropertiesNV;
struct CopyDescriptorSet;
#ifdef VK_USE_PLATFORM_WIN32_KHR
@@ -12825,12 +12739,15 @@ namespace VULKAN_HPP_NAMESPACE
struct DescriptorPoolSize;
struct DescriptorSetAllocateInfo;
struct DescriptorSetLayoutBinding;
- struct DescriptorSetLayoutBindingFlagsCreateInfoEXT;
+ struct DescriptorSetLayoutBindingFlagsCreateInfo;
+ using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
struct DescriptorSetLayoutCreateInfo;
struct DescriptorSetLayoutSupport;
using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
- struct DescriptorSetVariableDescriptorCountAllocateInfoEXT;
- struct DescriptorSetVariableDescriptorCountLayoutSupportEXT;
+ struct DescriptorSetVariableDescriptorCountAllocateInfo;
+ using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
+ struct DescriptorSetVariableDescriptorCountLayoutSupport;
+ using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
struct DescriptorUpdateTemplateCreateInfo;
using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
struct DescriptorUpdateTemplateEntry;
@@ -12852,6 +12769,8 @@ namespace VULKAN_HPP_NAMESPACE
struct DeviceGroupSubmitInfo;
using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
struct DeviceGroupSwapchainCreateInfoKHR;
+ struct DeviceMemoryOpaqueCaptureAddressInfo;
+ using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
struct DeviceMemoryOverallocationCreateInfoAMD;
struct DeviceQueueCreateInfo;
struct DeviceQueueGlobalPriorityCreateInfoEXT;
@@ -12929,8 +12848,10 @@ namespace VULKAN_HPP_NAMESPACE
struct FormatProperties;
struct FormatProperties2;
using FormatProperties2KHR = FormatProperties2;
- struct FramebufferAttachmentImageInfoKHR;
- struct FramebufferAttachmentsCreateInfoKHR;
+ struct FramebufferAttachmentImageInfo;
+ using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
+ struct FramebufferAttachmentsCreateInfo;
+ using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
struct FramebufferCreateInfo;
struct FramebufferMixedSamplesCombinationNV;
struct GeometryAABBNV;
@@ -12949,7 +12870,8 @@ namespace VULKAN_HPP_NAMESPACE
struct ImageDrmFormatModifierExplicitCreateInfoEXT;
struct ImageDrmFormatModifierListCreateInfoEXT;
struct ImageDrmFormatModifierPropertiesEXT;
- struct ImageFormatListCreateInfoKHR;
+ struct ImageFormatListCreateInfo;
+ using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
struct ImageFormatProperties;
struct ImageFormatProperties2;
using ImageFormatProperties2KHR = ImageFormatProperties2;
@@ -12964,7 +12886,8 @@ namespace VULKAN_HPP_NAMESPACE
struct ImageResolve;
struct ImageSparseMemoryRequirementsInfo2;
using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
- struct ImageStencilUsageCreateInfoEXT;
+ struct ImageStencilUsageCreateInfo;
+ using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
struct ImageSubresource;
struct ImageSubresourceLayers;
struct ImageSubresourceRange;
@@ -13023,6 +12946,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
struct MemoryHeap;
struct MemoryHostPointerPropertiesEXT;
+ struct MemoryOpaqueCaptureAddressAllocateInfo;
+ using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
struct MemoryPriorityAllocateInfoEXT;
struct MemoryRequirements;
struct MemoryRequirements2;
@@ -13046,17 +12971,24 @@ namespace VULKAN_HPP_NAMESPACE
struct Offset3D;
struct PastPresentationTimingGOOGLE;
struct PerformanceConfigurationAcquireInfoINTEL;
+ struct PerformanceCounterDescriptionKHR;
+ struct PerformanceCounterKHR;
+ union PerformanceCounterResultKHR;
struct PerformanceMarkerInfoINTEL;
struct PerformanceOverrideInfoINTEL;
+ struct PerformanceQuerySubmitInfoKHR;
struct PerformanceStreamMarkerInfoINTEL;
union PerformanceValueDataINTEL;
struct PerformanceValueINTEL;
struct PhysicalDevice16BitStorageFeatures;
using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
- struct PhysicalDevice8BitStorageFeaturesKHR;
+ struct PhysicalDevice8BitStorageFeatures;
+ using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
struct PhysicalDeviceASTCDecodeFeaturesEXT;
struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+ struct PhysicalDeviceBufferDeviceAddressFeatures;
+ using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
struct PhysicalDeviceBufferDeviceAddressFeaturesEXT;
using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
struct PhysicalDeviceCoherentMemoryFeaturesAMD;
@@ -13069,11 +13001,15 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceCoverageReductionModeFeaturesNV;
struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
struct PhysicalDeviceDepthClipEnableFeaturesEXT;
- struct PhysicalDeviceDepthStencilResolvePropertiesKHR;
- struct PhysicalDeviceDescriptorIndexingFeaturesEXT;
- struct PhysicalDeviceDescriptorIndexingPropertiesEXT;
+ struct PhysicalDeviceDepthStencilResolveProperties;
+ using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
+ struct PhysicalDeviceDescriptorIndexingFeatures;
+ using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
+ struct PhysicalDeviceDescriptorIndexingProperties;
+ using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
struct PhysicalDeviceDiscardRectanglePropertiesEXT;
- struct PhysicalDeviceDriverPropertiesKHR;
+ struct PhysicalDeviceDriverProperties;
+ using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
struct PhysicalDeviceExclusiveScissorFeaturesNV;
struct PhysicalDeviceExternalBufferInfo;
using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
@@ -13087,21 +13023,24 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceFeatures;
struct PhysicalDeviceFeatures2;
using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
- struct PhysicalDeviceFloatControlsPropertiesKHR;
+ struct PhysicalDeviceFloatControlsProperties;
+ using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
struct PhysicalDeviceFragmentDensityMapFeaturesEXT;
struct PhysicalDeviceFragmentDensityMapPropertiesEXT;
struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
struct PhysicalDeviceGroupProperties;
using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
- struct PhysicalDeviceHostQueryResetFeaturesEXT;
+ struct PhysicalDeviceHostQueryResetFeatures;
+ using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
struct PhysicalDeviceIDProperties;
using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
struct PhysicalDeviceImageDrmFormatModifierInfoEXT;
struct PhysicalDeviceImageFormatInfo2;
using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
struct PhysicalDeviceImageViewImageFormatInfoEXT;
- struct PhysicalDeviceImagelessFramebufferFeaturesKHR;
+ struct PhysicalDeviceImagelessFramebufferFeatures;
+ using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
struct PhysicalDeviceIndexTypeUint8FeaturesEXT;
struct PhysicalDeviceInlineUniformBlockFeaturesEXT;
struct PhysicalDeviceInlineUniformBlockPropertiesEXT;
@@ -13123,6 +13062,8 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceMultiviewProperties;
using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
struct PhysicalDevicePCIBusInfoPropertiesEXT;
+ struct PhysicalDevicePerformanceQueryFeaturesKHR;
+ struct PhysicalDevicePerformanceQueryPropertiesKHR;
struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
struct PhysicalDevicePointClippingProperties;
using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
@@ -13135,25 +13076,31 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceRayTracingPropertiesNV;
struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
struct PhysicalDeviceSampleLocationsPropertiesEXT;
- struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+ struct PhysicalDeviceSamplerFilterMinmaxProperties;
+ using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
struct PhysicalDeviceSamplerYcbcrConversionFeatures;
using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
- struct PhysicalDeviceScalarBlockLayoutFeaturesEXT;
- struct PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
- struct PhysicalDeviceShaderAtomicInt64FeaturesKHR;
+ struct PhysicalDeviceScalarBlockLayoutFeatures;
+ using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
+ struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+ using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+ struct PhysicalDeviceShaderAtomicInt64Features;
+ using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
struct PhysicalDeviceShaderClockFeaturesKHR;
struct PhysicalDeviceShaderCoreProperties2AMD;
struct PhysicalDeviceShaderCorePropertiesAMD;
struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
struct PhysicalDeviceShaderDrawParametersFeatures;
using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
- struct PhysicalDeviceShaderFloat16Int8FeaturesKHR;
- using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8FeaturesKHR;
+ struct PhysicalDeviceShaderFloat16Int8Features;
+ using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+ using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
struct PhysicalDeviceShaderImageFootprintFeaturesNV;
struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
struct PhysicalDeviceShaderSMBuiltinsFeaturesNV;
struct PhysicalDeviceShaderSMBuiltinsPropertiesNV;
- struct PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+ struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+ using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
struct PhysicalDeviceShadingRateImageFeaturesNV;
struct PhysicalDeviceShadingRateImagePropertiesNV;
struct PhysicalDeviceSparseImageFormatInfo2;
@@ -13166,18 +13113,27 @@ namespace VULKAN_HPP_NAMESPACE
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
- struct PhysicalDeviceTimelineSemaphoreFeaturesKHR;
- struct PhysicalDeviceTimelineSemaphorePropertiesKHR;
+ struct PhysicalDeviceTimelineSemaphoreFeatures;
+ using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
+ struct PhysicalDeviceTimelineSemaphoreProperties;
+ using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
+ struct PhysicalDeviceToolPropertiesEXT;
struct PhysicalDeviceTransformFeedbackFeaturesEXT;
struct PhysicalDeviceTransformFeedbackPropertiesEXT;
- struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+ struct PhysicalDeviceUniformBufferStandardLayoutFeatures;
+ using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
struct PhysicalDeviceVariablePointersFeatures;
using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
- struct PhysicalDeviceVulkanMemoryModelFeaturesKHR;
+ struct PhysicalDeviceVulkan11Features;
+ struct PhysicalDeviceVulkan11Properties;
+ struct PhysicalDeviceVulkan12Features;
+ struct PhysicalDeviceVulkan12Properties;
+ struct PhysicalDeviceVulkanMemoryModelFeatures;
+ using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
struct PhysicalDeviceYcbcrImageArraysFeaturesEXT;
struct PipelineCacheCreateInfo;
struct PipelineColorBlendAdvancedStateCreateInfoEXT;
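// Editorial sketch (not part of the diff): the PhysicalDeviceVulkan11/12
// structs forward-declared above aggregate the per-extension feature structs
// into one chainable block; a Vulkan 1.2 feature request might look like this.
#include <vulkan/vulkan.hpp>
vk::PhysicalDeviceVulkan12Features requestedVulkan12Features()
{
  vk::PhysicalDeviceVulkan12Features features;
  features.timelineSemaphore   = VK_TRUE;
  features.bufferDeviceAddress = VK_TRUE;
  return features;
}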
@@ -13234,6 +13190,7 @@ namespace VULKAN_HPP_NAMESPACE
struct PushConstantRange;
struct QueryPoolCreateInfo;
struct QueryPoolCreateInfoINTEL;
+ struct QueryPoolPerformanceCreateInfoKHR;
struct QueueFamilyCheckpointPropertiesNV;
struct QueueFamilyProperties;
struct QueueFamilyProperties2;
@@ -13243,10 +13200,12 @@ namespace VULKAN_HPP_NAMESPACE
struct Rect2D;
struct RectLayerKHR;
struct RefreshCycleDurationGOOGLE;
- struct RenderPassAttachmentBeginInfoKHR;
+ struct RenderPassAttachmentBeginInfo;
+ using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
struct RenderPassBeginInfo;
struct RenderPassCreateInfo;
- struct RenderPassCreateInfo2KHR;
+ struct RenderPassCreateInfo2;
+ using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
struct RenderPassFragmentDensityMapCreateInfoEXT;
struct RenderPassInputAttachmentAspectCreateInfo;
using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
@@ -13256,7 +13215,8 @@ namespace VULKAN_HPP_NAMESPACE
struct SampleLocationEXT;
struct SampleLocationsInfoEXT;
struct SamplerCreateInfo;
- struct SamplerReductionModeCreateInfoEXT;
+ struct SamplerReductionModeCreateInfo;
+ using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
struct SamplerYcbcrConversionCreateInfo;
using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
struct SamplerYcbcrConversionImageFormatProperties;
@@ -13268,9 +13228,12 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
struct SemaphoreGetWin32HandleInfoKHR;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- struct SemaphoreSignalInfoKHR;
- struct SemaphoreTypeCreateInfoKHR;
- struct SemaphoreWaitInfoKHR;
+ struct SemaphoreSignalInfo;
+ using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
+ struct SemaphoreTypeCreateInfo;
+ using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
+ struct SemaphoreWaitInfo;
+ using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
struct ShaderModuleCreateInfo;
struct ShaderModuleValidationCacheCreateInfoEXT;
struct ShaderResourceUsageAMD;
@@ -13295,13 +13258,18 @@ namespace VULKAN_HPP_NAMESPACE
struct StreamDescriptorSurfaceCreateInfoGGP;
#endif /*VK_USE_PLATFORM_GGP*/
struct SubmitInfo;
- struct SubpassBeginInfoKHR;
+ struct SubpassBeginInfo;
+ using SubpassBeginInfoKHR = SubpassBeginInfo;
struct SubpassDependency;
- struct SubpassDependency2KHR;
+ struct SubpassDependency2;
+ using SubpassDependency2KHR = SubpassDependency2;
struct SubpassDescription;
- struct SubpassDescription2KHR;
- struct SubpassDescriptionDepthStencilResolveKHR;
- struct SubpassEndInfoKHR;
+ struct SubpassDescription2;
+ using SubpassDescription2KHR = SubpassDescription2;
+ struct SubpassDescriptionDepthStencilResolve;
+ using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
+ struct SubpassEndInfo;
+ using SubpassEndInfoKHR = SubpassEndInfo;
struct SubpassSampleLocationsEXT;
struct SubresourceLayout;
struct SurfaceCapabilities2EXT;
@@ -13323,7 +13291,8 @@ namespace VULKAN_HPP_NAMESPACE
struct SwapchainCreateInfoKHR;
struct SwapchainDisplayNativeHdrCreateInfoAMD;
struct TextureLODGatherFormatPropertiesAMD;
- struct TimelineSemaphoreSubmitInfoKHR;
+ struct TimelineSemaphoreSubmitInfo;
+ using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
struct ValidationCacheCreateInfoEXT;
struct ValidationFeaturesEXT;
struct ValidationFlagsEXT;
@@ -14760,154 +14729,161 @@ namespace VULKAN_HPP_NAMESPACE
}
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result begin( const vk::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginConditionalRenderingEXT( const vk::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginQuery( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass( const vk::RenderPassBeginInfo* pRenderPassBegin, vk::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, vk::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2KHR( const vk::RenderPassBeginInfo* pRenderPassBegin, const vk::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const vk::Buffer* pCounterBuffers, const vk::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const vk::Buffer> counterBuffers, ArrayProxy<const vk::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindDescriptorSets( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindDescriptorSets( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const vk::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindIndexBuffer( vk::Buffer buffer, vk::DeviceSize offset, vk::IndexType indexType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindPipeline( vk::PipelineBindPoint pipelineBindPoint, vk::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindShadingRateImageNV( vk::ImageView imageView, vk::ImageLayout imageLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const vk::Buffer* pBuffers, const vk::DeviceSize* pOffsets, const vk::DeviceSize* pSizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const vk::Buffer> buffers, ArrayProxy<const vk::DeviceSize> offsets, ArrayProxy<const vk::DeviceSize> sizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const vk::Buffer* pBuffers, const vk::DeviceSize* pOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const vk::Buffer> buffers, ArrayProxy<const vk::DeviceSize> offsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageBlit* pRegions, vk::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void blitImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy<const vk::ImageBlit> regions, vk::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructureNV( const vk::AccelerationStructureInfoNV* pInfo, vk::Buffer instanceData, vk::DeviceSize instanceOffset, vk::Bool32 update, vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::Buffer scratch, vk::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, vk::Buffer instanceData, vk::DeviceSize instanceOffset, vk::Bool32 update, vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::Buffer scratch, vk::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( uint32_t attachmentCount, const vk::ClearAttachment* pAttachments, uint32_t rectCount, const vk::ClearRect* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearAttachments( ArrayProxy<const vk::ClearAttachment> attachments, ArrayProxy<const vk::ClearRect> rects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> rects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearColorImage( vk::Image image, vk::ImageLayout imageLayout, const vk::ClearColorValue* pColor, uint32_t rangeCount, const vk::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearColorImage( vk::Image image, vk::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const vk::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearDepthStencilImage( vk::Image image, vk::ImageLayout imageLayout, const vk::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const vk::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void clearDepthStencilImage( vk::Image image, vk::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const vk::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyAccelerationStructureNV( vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::CopyAccelerationStructureModeNV mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( vk::Buffer srcBuffer, vk::Buffer dstBuffer, uint32_t regionCount, const vk::BufferCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBuffer( vk::Buffer srcBuffer, vk::Buffer dstBuffer, ArrayProxy<const vk::BufferCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( vk::Buffer srcBuffer, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyBufferToImage( vk::Buffer srcBuffer, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy<const vk::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy<const vk::ImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Buffer dstBuffer, uint32_t regionCount, const vk::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyImageToBuffer( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Buffer dstBuffer, ArrayProxy<const vk::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void copyQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerBeginEXT( const vk::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
@@ -14917,7 +14893,7 @@ namespace VULKAN_HPP_NAMESPACE
void debugMarkerEndEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugMarkerInsertEXT( const vk::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
@@ -14933,7 +14909,7 @@ namespace VULKAN_HPP_NAMESPACE
void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void dispatchIndirect( vk::Buffer buffer, vk::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
@@ -14942,31 +14918,37 @@ namespace VULKAN_HPP_NAMESPACE
void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndexedIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, vk::Buffer counterBuffer, vk::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectCountNV( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void drawMeshTasksIndirectNV( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
@@ -14978,104 +14960,118 @@ namespace VULKAN_HPP_NAMESPACE
void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQuery( vk::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endRenderPass(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const vk::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const vk::Buffer* pCounterBuffers, const vk::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const vk::Buffer> counterBuffers, ArrayProxy<const vk::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void executeCommands( ArrayProxy<const vk::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void fillBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass( vk::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const vk::SubpassBeginInfoKHR* pSubpassBeginInfo, const vk::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, vk::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const vk::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const vk::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const vk::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pipelineBarrier( vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, vk::DependencyFlags dependencyFlags, ArrayProxy<const vk::MemoryBarrier> memoryBarriers, ArrayProxy<const vk::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const vk::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void processCommandsNVX( const vk::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( vk::PipelineLayout layout, vk::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushConstants( vk::PipelineLayout layout, vk::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const vk::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetKHR( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t set, ArrayProxy<const vk::WriteDescriptorSet> descriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void pushDescriptorSetWithTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, vk::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void reserveSpaceForCommandsNVX( const vk::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPool( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageResolve* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resolveImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy<const vk::ImageResolve> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -15085,10 +15081,10 @@ namespace VULKAN_HPP_NAMESPACE
void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const vk::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const vk::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -15104,20 +15100,20 @@ namespace VULKAN_HPP_NAMESPACE
void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const vk::Rect2D* pDiscardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const vk::Rect2D> discardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const vk::Rect2D* pExclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const vk::Rect2D> exclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -15127,114 +15123,114 @@ namespace VULKAN_HPP_NAMESPACE
void setLineWidth( float lineWidth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceMarkerINTEL( const vk::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceOverrideINTEL( const vk::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceStreamMarkerINTEL( const vk::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setSampleLocationsEXT( const vk::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor, uint32_t scissorCount, const vk::Rect2D* pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setScissor( uint32_t firstScissor, ArrayProxy<const vk::Rect2D> scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilCompareMask( vk::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilReference( vk::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setStencilWriteMask( vk::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport, uint32_t viewportCount, const vk::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewport( uint32_t firstViewport, ArrayProxy<const vk::Viewport> viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const vk::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const vk::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const vk::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const vk::ViewportWScalingNV> viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void traceRaysNV( vk::Buffer raygenShaderBindingTableBuffer, vk::DeviceSize raygenShaderBindingOffset, vk::Buffer missShaderBindingTableBuffer, vk::DeviceSize missShaderBindingOffset, vk::DeviceSize missShaderBindingStride, vk::Buffer hitShaderBindingTableBuffer, vk::DeviceSize hitShaderBindingOffset, vk::DeviceSize hitShaderBindingStride, vk::Buffer callableShaderBindingTableBuffer, vk::DeviceSize callableShaderBindingOffset, vk::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize dataSize, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( uint32_t eventCount, const vk::Event* pEvents, vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const vk::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const vk::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const vk::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void waitEvents( ArrayProxy<const vk::Event> events, vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, ArrayProxy<const vk::MemoryBarrier> memoryBarriers, ArrayProxy<const vk::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const vk::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const vk::AccelerationStructureNV* pAccelerationStructures, vk::QueryType queryType, vk::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeAccelerationStructuresPropertiesNV( ArrayProxy<const vk::AccelerationStructureNV> accelerationStructures, vk::QueryType queryType, vk::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeBufferMarkerAMD( vk::PipelineStageFlagBits pipelineStage, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void writeTimestamp( vk::PipelineStageFlagBits pipelineStage, vk::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result reset( vk::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type reset( vk::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
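// Illustration (editor's sketch, not part of the upstream diff): the recurring
// "ResultValueType<void>::type" -> "typename ResultValueType<void>::type" change in
// this hunk marks the nested name 'type' as a type inside these template
// declarations. 'typename' is mandatory for dependent qualified names and, since
// C++11, is also permitted for non-dependent ones such as ResultValueType<void>::type,
// which lets the generator emit it uniformly. A minimal self-contained analogue
// (the names here are hypothetical, not vulkan.hpp's):
template <typename T> struct MyResultValueType { using type = T; };
template <typename T>
typename MyResultValueType<T>::type passThrough( T value )
{
    return value;  // MyResultValueType<T>::type depends on T, so 'typename' is required
}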
@@ -16364,7 +16360,7 @@ namespace VULKAN_HPP_NAMESPACE
}
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getCheckpointDataNV( uint32_t* pCheckpointDataCount, vk::CheckpointDataNV* pCheckpointData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16373,31 +16369,31 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void beginDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindSparse( uint32_t bindInfoCount, const vk::BindSparseInfo* pBindInfo, vk::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type bindSparse( ArrayProxy<const vk::BindSparseInfo> bindInfo, vk::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void insertDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result presentKHR( const vk::PresentInfoKHR* pPresentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16405,25 +16401,25 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setPerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type setPerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result submit( uint32_t submitCount, const vk::SubmitInfo* pSubmits, vk::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type submit( ArrayProxy<const vk::SubmitInfo> submits, vk::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
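// Illustration (editor's sketch, not part of the upstream diff): the systematic
// "vk::" -> "VULKAN_HPP_NAMESPACE::" rewrite throughout these hunks spells out the
// macro the namespace is generated from, so the declarations stay valid when a
// project renames it. vulkan.hpp defaults the macro to 'vk'; assuming only that, a
// consumer can override it before the include:
#define VULKAN_HPP_NAMESPACE myvk   // hypothetical custom namespace name
#include <vulkan/vulkan.hpp>

myvk::Queue queue;                  // the same type the header spells VULKAN_HPP_NAMESPACE::Queue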
@@ -16472,6 +16468,7 @@ namespace VULKAN_HPP_NAMESPACE
using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch> class UniqueHandleTraits<DeviceMemory, Dispatch> { public: using deleter = ObjectFree<Device, Dispatch>; };
using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch> class UniqueHandleTraits<Event, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
@@ -16502,6 +16499,7 @@ namespace VULKAN_HPP_NAMESPACE
using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch> class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+ using UniqueSamplerYcbcrConversionKHR = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch> class UniqueHandleTraits<Semaphore, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch> class UniqueHandleTraits<ShaderModule, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
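// Illustration (editor's sketch): the two aliases added in this hunk
// (UniqueDescriptorUpdateTemplateKHR, UniqueSamplerYcbcrConversionKHR) give the
// promoted KHR extension spellings their own Unique* names while reusing the core
// handle type, so code written against the extension names keeps compiling. As the
// diff shows, alias and core name denote one and the same type:
#include <type_traits>
#include <vulkan/vulkan.hpp>
static_assert( std::is_same<vk::UniqueDescriptorUpdateTemplateKHR,
                            vk::UniqueDescriptorUpdateTemplate>::value,
               "KHR alias shares the core unique-handle type" );
static_assert( std::is_same<vk::UniqueSamplerYcbcrConversionKHR,
                            vk::UniqueSamplerYcbcrConversion>::value,
               "KHR alias shares the core unique-handle type" );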
@@ -16564,36 +16562,43 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireNextImage2KHR( const vk::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireNextImageKHR( vk::SwapchainKHR swapchain, uint64_t timeout, vk::Semaphore semaphore, vk::Fence fence, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValue<uint32_t> acquireNextImageKHR( vk::SwapchainKHR swapchain, uint64_t timeout, vk::Semaphore semaphore, vk::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquirePerformanceConfigurationINTEL( const vk::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, vk::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result allocateCommandBuffers( const vk::CommandBufferAllocateInfo* pAllocateInfo, vk::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
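// Illustration (editor's sketch; 'device' and 'pool' are assumed, pre-created
// vk::Device and vk::CommandPool handles): the pointer overload above mirrors the
// C API, while the enhanced-mode overload wraps the call and returns the command
// buffers as a std::vector, throwing on failure or packing the Result depending on
// VULKAN_HPP_NO_EXCEPTIONS:
vk::CommandBufferAllocateInfo allocInfo( pool, vk::CommandBufferLevel::ePrimary, 3 );
std::vector<vk::CommandBuffer> buffers = device.allocateCommandBuffers( allocInfo );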
@@ -16608,7 +16613,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result allocateDescriptorSets( const vk::DescriptorSetAllocateInfo* pAllocateInfo, vk::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16623,10 +16628,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result allocateMemory( const vk::MemoryAllocateInfo* pAllocateInfo, const vk::AllocationCallbacks* pAllocator, vk::DeviceMemory* pMemory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16634,69 +16639,69 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const vk::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const vk::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindBufferMemory( vk::Buffer buffer, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type bindBufferMemory( vk::Buffer buffer, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindBufferMemory2( uint32_t bindInfoCount, const vk::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const vk::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindBufferMemory2KHR( uint32_t bindInfoCount, const vk::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const vk::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindImageMemory( vk::Image image, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type bindImageMemory( vk::Image image, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindImageMemory2( uint32_t bindInfoCount, const vk::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type bindImageMemory2( ArrayProxy<const vk::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result bindImageMemory2KHR( uint32_t bindInfoCount, const vk::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const vk::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result compileDeferredNV( vk::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type compileDeferredNV( vk::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createAccelerationStructureNV( const vk::AccelerationStructureCreateInfoNV* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16704,10 +16709,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createBuffer( const vk::BufferCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Buffer* pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16715,10 +16720,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createBufferView( const vk::BufferViewCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::BufferView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16726,10 +16731,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createCommandPool( const vk::CommandPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::CommandPool* pCommandPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16737,29 +16742,29 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createComputePipelines( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::ComputePipelineCreateInfo* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( vk::PipelineCache pipelineCache, ArrayProxy<const vk::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( vk::PipelineCache pipelineCache, ArrayProxy<const vk::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<Pipeline>::type createComputePipeline( vk::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<Pipeline>::type createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createComputePipelineUnique( vk::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDescriptorPool( const vk::DescriptorPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16767,10 +16772,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDescriptorSetLayout( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorSetLayout* pSetLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16778,10 +16783,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDescriptorUpdateTemplate( const vk::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16789,10 +16794,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDescriptorUpdateTemplateKHR( const vk::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16800,10 +16805,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createEvent( const vk::EventCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Event* pEvent, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Event,Dispatch>>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16811,10 +16816,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createFence( const vk::FenceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16822,10 +16827,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createFramebuffer( const vk::FramebufferCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Framebuffer* pFramebuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16833,29 +16838,29 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createGraphicsPipelines( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::GraphicsPipelineCreateInfo* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( vk::PipelineCache pipelineCache, ArrayProxy<const vk::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( vk::PipelineCache pipelineCache, ArrayProxy<const vk::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<Pipeline>::type createGraphicsPipeline( vk::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<Pipeline>::type createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createGraphicsPipelineUnique( vk::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createImage( const vk::ImageCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Image* pImage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Image,Dispatch>>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16863,10 +16868,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createImageView( const vk::ImageViewCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ImageView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16874,10 +16879,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createIndirectCommandsLayoutNVX( const vk::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16885,10 +16890,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createObjectTableNVX( const vk::ObjectTableCreateInfoNVX* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ObjectTableNVX* pObjectTable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16896,10 +16901,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createPipelineCache( const vk::PipelineCacheCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::PipelineCache* pPipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16907,10 +16912,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createPipelineLayout( const vk::PipelineLayoutCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::PipelineLayout* pPipelineLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16918,10 +16923,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createQueryPool( const vk::QueryPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::QueryPool* pQueryPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16929,29 +16934,29 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::RayTracingPipelineCreateInfoNV* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, ArrayProxy<const vk::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, ArrayProxy<const vk::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<Pipeline>::type createRayTracingPipelineNV( vk::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<Pipeline>::type createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createRayTracingPipelineNVUnique( vk::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createRenderPass( const vk::RenderPassCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16959,21 +16964,32 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createRenderPass2KHR( const vk::RenderPassCreateInfo2KHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSampler( const vk::SamplerCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Sampler* pSampler, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16981,10 +16997,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSamplerYcbcrConversion( const vk::SamplerYcbcrConversionCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -16992,10 +17008,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSamplerYcbcrConversionKHR( const vk::SamplerYcbcrConversionCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17003,10 +17019,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSemaphore( const vk::SemaphoreCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Semaphore* pSemaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17014,10 +17030,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createShaderModule( const vk::ShaderModuleCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ShaderModule* pShaderModule, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17025,29 +17041,29 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSharedSwapchainsKHR( uint32_t swapchainCount, const vk::SwapchainCreateInfoKHR* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::SwapchainKHR* pSwapchains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const vk::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const vk::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const vk::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const vk::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createSwapchainKHR( const vk::SwapchainCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SwapchainKHR* pSwapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17055,10 +17071,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createValidationCacheEXT( const vk::ValidationCacheCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ValidationCacheEXT* pValidationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17066,580 +17082,622 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result debugMarkerSetObjectNameEXT( const vk::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result debugMarkerSetObjectTagEXT( const vk::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureNV( vk::AccelerationStructureNV accelerationStructure, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyAccelerationStructureNV( vk::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::AccelerationStructureNV accelerationStructure, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBuffer( vk::Buffer buffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBuffer( vk::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Buffer buffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBufferView( vk::BufferView bufferView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyBufferView( vk::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::BufferView bufferView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCommandPool( vk::CommandPool commandPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyCommandPool( vk::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::CommandPool commandPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorPool( vk::DescriptorPool descriptorPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorPool( vk::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DescriptorPool descriptorPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorSetLayout( vk::DescriptorSetLayout descriptorSetLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorSetLayout( vk::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DescriptorSetLayout descriptorSetLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplate( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplate( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDescriptorUpdateTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyEvent( vk::Event event, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyEvent( vk::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Event event, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFence( vk::Fence fence, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFence( vk::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Fence fence, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFramebuffer( vk::Framebuffer framebuffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyFramebuffer( vk::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Framebuffer framebuffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImage( vk::Image image, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImage( vk::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Image image, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImageView( vk::ImageView imageView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyImageView( vk::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::ImageView imageView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyIndirectCommandsLayoutNVX( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyIndirectCommandsLayoutNVX( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyObjectTableNVX( vk::ObjectTableNVX objectTable, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyObjectTableNVX( vk::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::ObjectTableNVX objectTable, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipeline( vk::Pipeline pipeline, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipeline( vk::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Pipeline pipeline, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineCache( vk::PipelineCache pipelineCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineCache( vk::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::PipelineCache pipelineCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineLayout( vk::PipelineLayout pipelineLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyPipelineLayout( vk::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::PipelineLayout pipelineLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyQueryPool( vk::QueryPool queryPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyQueryPool( vk::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::QueryPool queryPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyRenderPass( vk::RenderPass renderPass, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyRenderPass( vk::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::RenderPass renderPass, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySampler( vk::Sampler sampler, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySampler( vk::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Sampler sampler, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversion( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversion( vk::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversionKHR( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySamplerYcbcrConversionKHR( vk::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySemaphore( vk::Semaphore semaphore, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySemaphore( vk::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Semaphore semaphore, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyShaderModule( vk::ShaderModule shaderModule, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyShaderModule( vk::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::ShaderModule shaderModule, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySwapchainKHR( vk::SwapchainKHR swapchain, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySwapchainKHR( vk::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::SwapchainKHR swapchain, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyValidationCacheEXT( vk::ValidationCacheEXT validationCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyValidationCacheEXT( vk::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::ValidationCacheEXT validationCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result displayPowerControlEXT( vk::DisplayKHR display, const vk::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type displayPowerControlEXT( vk::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const vk::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const vk::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeCommandBuffers( vk::CommandPool commandPool, uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeCommandBuffers( vk::CommandPool commandPool, ArrayProxy<const vk::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( vk::CommandPool commandPool, uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( vk::CommandPool commandPool, ArrayProxy<const vk::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result freeDescriptorSets( vk::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type freeDescriptorSets( vk::DescriptorPool descriptorPool, ArrayProxy<const vk::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result free( vk::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type free( vk::DescriptorPool descriptorPool, ArrayProxy<const vk::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeMemory( vk::DeviceMemory memory, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void freeMemory( vk::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( vk::DeviceMemory memory, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void free( vk::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getAccelerationStructureHandleNV( vk::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type getAccelerationStructureHandleNV( vk::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getAccelerationStructureMemoryRequirementsNV( const vk::AccelerationStructureMemoryRequirementsInfoNV* pInfo, vk::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, vk::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getBufferAddressEXT( const vk::BufferDeviceAddressInfoEXT* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfoEXT & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
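// A minimal usage sketch for the enhanced getBufferAddress overload above,
// assuming a hypothetical, already-created `device` and `buffer`; requires the
// bufferDeviceAddress feature (core in Vulkan 1.2):
//   vk::DeviceAddress addr =
//       device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );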
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements( vk::Buffer buffer, vk::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::MemoryRequirements getBufferMemoryRequirements( vk::Buffer buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements2( const vk::BufferMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
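// A usage sketch for the enhanced overloads above, assuming a hypothetical
// `device` and `buffer`; the StructureChain variant extracts extended results
// such as vk::MemoryDedicatedRequirements from the same query:
//   vk::MemoryRequirements2 reqs = device.getBufferMemoryRequirements2(
//       vk::BufferMemoryRequirementsInfo2( buffer ) );
//   auto chain = device.getBufferMemoryRequirements2<
//       vk::MemoryRequirements2, vk::MemoryDedicatedRequirements >(
//       vk::BufferMemoryRequirementsInfo2( buffer ) );
//   bool dedicated =
//       chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;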
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getBufferMemoryRequirements2KHR( const vk::BufferMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getCalibratedTimestampsEXT( uint32_t timestampCount, const vk::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const vk::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getDescriptorSetLayoutSupport( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, vk::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
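// A sketch for the enhanced overload above (VK_EXT_calibrated_timestamps),
// assuming a hypothetical `device`; the enhanced form fills `timestamps` and
// returns the maximum deviation, throwing vk::SystemError on failure unless
// VULKAN_HPP_NO_EXCEPTIONS is defined:
//   std::array<vk::CalibratedTimestampInfoEXT, 2> infos = {
//       vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eDevice ),
//       vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eClockMonotonic ) };
//   std::array<uint64_t, 2> timestamps{};
//   uint64_t maxDeviation = device.getCalibratedTimestampsEXT( infos, timestamps );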
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
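// A sketch for the enhanced overload above, assuming a hypothetical `device`
// and a filled-in vk::DescriptorSetLayoutCreateInfo `layoutInfo`; the query
// cannot fail, it simply reports whether the layout can be created:
//   vk::DescriptorSetLayoutSupport support =
//       device.getDescriptorSetLayoutSupport( layoutInfo );
//   bool ok = support.supported;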
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getDescriptorSetLayoutSupportKHR( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, vk::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, vk::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, vk::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
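// A sketch for the enhanced overload above (device groups), assuming a
// hypothetical `device` created from a multi-GPU physical-device group:
//   vk::PeerMemoryFeatureFlags features =
//       device.getGroupPeerMemoryFeatures( 0 /*heapIndex*/,
//                                          0 /*localDeviceIndex*/,
//                                          1 /*remoteDeviceIndex*/ );
//   bool canCopySrc =
//       static_cast<bool>( features & vk::PeerMemoryFeatureFlagBits::eCopySrc );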
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getGroupPresentCapabilitiesKHR( vk::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getGroupSurfacePresentModes2EXT( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, vk::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getGroupSurfacePresentModesKHR( vk::SurfaceKHR surface, vk::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
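// A sketch for the enhanced overload above, assuming a hypothetical `device`
// and a vk::DeviceMemory `memory` backed by a lazily-allocated heap:
//   vk::DeviceSize committed = device.getMemoryCommitment( memory );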
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( vk::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryCommitment( vk::DeviceMemory memory, vk::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::DeviceSize getMemoryCommitment( vk::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
@@ -17650,84 +17708,94 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, vk::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueue2( const vk::DeviceQueueInfo2* pQueueInfo, vk::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
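// A sketch for the enhanced getQueue/getQueue2 overloads above, assuming a
// hypothetical `device` created with at least one queue in `graphicsFamily`:
//   vk::Queue q  = device.getQueue( graphicsFamily, 0 );
//   vk::Queue q2 = device.getQueue2(
//       vk::DeviceQueueInfo2( {}, graphicsFamily, 0 ) );  // flags, family, index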
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getEventStatus( vk::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getFenceFdKHR( const vk::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
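// A sketch for the enhanced overload above (VK_KHR_external_fence_fd),
// assuming a hypothetical `device` and an exportable `fence`; throws
// vk::SystemError on failure unless VULKAN_HPP_NO_EXCEPTIONS is defined:
//   int fd = device.getFenceFdKHR( vk::FenceGetFdInfoKHR(
//       fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd ) );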
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getFenceStatus( vk::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getFenceWin32HandleKHR( const vk::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getImageDrmFormatModifierPropertiesEXT( vk::Image image, vk::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( vk::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageMemoryRequirements( vk::Image image, vk::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::MemoryRequirements getImageMemoryRequirements( vk::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
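// A sketch for the enhanced overload above, assuming a hypothetical `device`
// and `image`; size, alignment and memoryTypeBits drive the later allocation:
//   vk::MemoryRequirements reqs = device.getImageMemoryRequirements( image );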
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageMemoryRequirements2( const vk::ImageMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageMemoryRequirements2KHR( const vk::ImageMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements( vk::Image image, uint32_t* pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( vk::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( vk::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
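// A sketch for the enhanced overload above, assuming a hypothetical `device`
// and a sparse-resident `image`; the enhanced form returns the whole array in
// one call instead of the usual count/fill two-step:
//   std::vector<vk::SparseImageMemoryRequirements> sparseReqs =
//       device.getImageSparseMemoryRequirements( image );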
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements2( const vk::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17736,7 +17804,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSparseMemoryRequirements2KHR( const vk::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17745,14 +17813,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getImageSubresourceLayout( vk::Image image, const vk::ImageSubresource* pSubresource, vk::SubresourceLayout* pLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::SubresourceLayout getImageSubresourceLayout( vk::Image image, const ImageSubresource & subresource, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- uint32_t getImageViewHandleNVX( const vk::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
@@ -17760,88 +17828,88 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryAndroidHardwareBufferANDROID( const vk::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryFdKHR( const vk::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, int fd, vk::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, vk::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryWin32HandleKHR( const vk::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryWin32HandleNV( vk::DeviceMemory memory, vk::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<HANDLE>::type getMemoryWin32HandleNV( vk::DeviceMemory memory, vk::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getMemoryWin32HandlePropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, vk::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, vk::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL parameter, vk::PerformanceValueINTEL* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::PerformanceValueINTEL>::type getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL parameter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPipelineCacheData( vk::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( vk::PipelineCache pipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( vk::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
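// A sketch for the enhanced overload above, assuming a hypothetical `device`
// and `cache`; the returned blob can be persisted and handed back to
// createPipelineCache on a later run to warm the cache:
//   std::vector<uint8_t> blob = device.getPipelineCacheData( cache );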
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPipelineExecutableInternalRepresentationsKHR( const vk::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, vk::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17850,7 +17918,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPipelineExecutablePropertiesKHR( const vk::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, vk::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17859,7 +17927,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPipelineExecutableStatisticsKHR( const vk::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, vk::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -17868,316 +17936,356 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
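// A sketch for the enhanced ArrayProxy overload above, assuming a hypothetical
// `device` and a `queryPool` with `count` queries; note the element type must
// be given explicitly, and the call returns vk::Result because eNotReady is a
// valid outcome:
//   std::vector<uint64_t> results( count );
//   vk::Result r = device.getQueryPoolResults<uint64_t>(
//       queryPool, 0, count, results, sizeof( uint64_t ),
//       vk::QueryResultFlagBits::e64 );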
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getRayTracingShaderGroupHandlesNV( vk::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( vk::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getRefreshCycleDurationGOOGLE( vk::SwapchainKHR swapchain, vk::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( vk::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getRenderAreaGranularity( vk::RenderPass renderPass, vk::Extent2D* pGranularity, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::Extent2D getRenderAreaGranularity( vk::RenderPass renderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSemaphoreCounterValueKHR( vk::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( vk::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSemaphoreFdKHR( const vk::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
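// A sketch for the timeline-semaphore overloads above (the KHR suffix is the
// VK_KHR_timeline_semaphore alias of the core call), assuming a hypothetical
// `device` and timeline semaphore `sem`:
//   uint64_t value = device.getSemaphoreCounterValue( sem );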
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSemaphoreWin32HandleKHR( const vk::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSwapchainCounterEXT( vk::SwapchainKHR swapchain, vk::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<uint64_t>::type getSwapchainCounterEXT( vk::SwapchainKHR swapchain, vk::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSwapchainImagesKHR( vk::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, vk::Image* pSwapchainImages, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( vk::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( vk::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSwapchainStatusKHR( vk::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result importFenceFdKHR( const vk::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result importFenceWin32HandleKHR( const vk::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result importSemaphoreFdKHR( const vk::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result importSemaphoreWin32HandleKHR( const vk::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result initializePerformanceApiINTEL( const vk::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const vk::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const vk::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result mapMemory( vk::DeviceMemory memory, vk::DeviceSize offset, vk::DeviceSize size, vk::MemoryMapFlags flags, void** ppData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void*>::type mapMemory( vk::DeviceMemory memory, vk::DeviceSize offset, vk::DeviceSize size, vk::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void*>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result mergePipelineCaches( vk::PipelineCache dstCache, uint32_t srcCacheCount, const vk::PipelineCache* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type mergePipelineCaches( vk::PipelineCache dstCache, ArrayProxy<const vk::PipelineCache> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result mergeValidationCachesEXT( vk::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const vk::ValidationCacheEXT* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type mergeValidationCachesEXT( vk::ValidationCacheEXT dstCache, ArrayProxy<const vk::ValidationCacheEXT> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result registerEventEXT( const vk::DeviceEventInfoEXT* pDeviceEventInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result registerDisplayEventEXT( vk::DisplayKHR display, const vk::DisplayEventInfoEXT* pDisplayEventInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Fence>::type registerDisplayEventEXT( vk::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result registerObjectsNVX( vk::ObjectTableNVX objectTable, uint32_t objectCount, const vk::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type registerObjectsNVX( vk::ObjectTableNVX objectTable, ArrayProxy<const vk::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result releaseFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result releasePerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type releasePerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void releaseProfilingLockKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetCommandPool( vk::CommandPool commandPool, vk::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type resetCommandPool( vk::CommandPool commandPool, vk::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetDescriptorPool( vk::DescriptorPool descriptorPool, vk::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type resetDescriptorPool( vk::DescriptorPool descriptorPool, vk::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetEvent( vk::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type resetEvent( vk::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result resetFences( uint32_t fenceCount, const vk::Fence* pFences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type resetFences( ArrayProxy<const vk::Fence> fences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void resetQueryPoolEXT( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setDebugUtilsObjectNameEXT( const vk::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setDebugUtilsObjectTagEXT( const vk::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result setEvent( vk::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type setEvent( vk::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setHdrMetadataEXT( uint32_t swapchainCount, const vk::SwapchainKHR* pSwapchains, const vk::HdrMetadataEXT* pMetadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setHdrMetadataEXT( ArrayProxy<const vk::SwapchainKHR> swapchains, ArrayProxy<const vk::HdrMetadataEXT> metadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void setLocalDimmingAMD( vk::SwapchainKHR swapChain, vk::Bool32 localDimmingEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result signalSemaphoreKHR( const vk::SemaphoreSignalInfoKHR* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfoKHR & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void trimCommandPool( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void trimCommandPoolKHR( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void uninitializePerformanceApiINTEL(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void unmapMemory( vk::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result unregisterObjectsNVX( vk::ObjectTableNVX objectTable, uint32_t objectCount, const vk::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type unregisterObjectsNVX( vk::ObjectTableNVX objectTable, ArrayProxy<const vk::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSetWithTemplate( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSetWithTemplateKHR( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSets( uint32_t descriptorWriteCount, const vk::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const vk::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void updateDescriptorSets( ArrayProxy<const vk::WriteDescriptorSet> descriptorWrites, ArrayProxy<const vk::CopyDescriptorSet> descriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitForFences( uint32_t fenceCount, const vk::Fence* pFences, vk::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitForFences( ArrayProxy<const vk::Fence> fences, vk::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitSemaphoresKHR( const vk::SemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result waitSemaphoresKHR( const SemaphoreWaitInfoKHR & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
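
For orientation, a minimal usage sketch of the timeline-semaphore entry points promoted to core above (getSemaphoreCounterValue, waitSemaphores). This is illustrative only, not part of the diff; it assumes exceptions are enabled and that a device `device` and a timeline semaphore `timeline` (created with VK_SEMAPHORE_TYPE_TIMELINE) exist elsewhere:

#include <cstdint>
#include <vulkan/vulkan.hpp>

// Wait until a timeline semaphore reaches `target`, returning the value seen first.
uint64_t waitForTimelineValue( vk::Device device, vk::Semaphore timeline, uint64_t target )
{
    // Core Vulkan 1.2 name; the KHR alias declared above dispatches identically.
    uint64_t current = device.getSemaphoreCounterValue( timeline );
    if ( current < target )
    {
        vk::SemaphoreWaitInfo waitInfo;
        waitInfo.semaphoreCount = 1;
        waitInfo.pSemaphores    = &timeline;
        waitInfo.pValues        = &target;
        // Returns vk::Result::eTimeout if `target` is not reached in time.
        vk::Result result = device.waitSemaphores( waitInfo, UINT64_MAX );
        static_cast<void>( result );
    }
    return current;
}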
@@ -18337,18 +18445,18 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result acquireXlibDisplayEXT( Display* dpy, vk::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<Display>::type acquireXlibDisplayEXT( vk::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<Display>::type acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDevice( const vk::DeviceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Device* pDevice, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Device,Dispatch>>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
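
As a reference point, a minimal sketch of the createDeviceUnique overload declared above; the physical device handle and queue family index are assumed to come from enumeration elsewhere:

#include <vulkan/vulkan.hpp>

// Create a single-queue logical device; the UniqueHandle destroys it on scope exit.
vk::UniqueDevice makeDevice( vk::PhysicalDevice gpu, uint32_t queueFamilyIndex )
{
    float priority = 1.0f;
    vk::DeviceQueueCreateInfo queueInfo;
    queueInfo.queueFamilyIndex = queueFamilyIndex;
    queueInfo.queueCount       = 1;
    queueInfo.pQueuePriorities = &priority;

    vk::DeviceCreateInfo createInfo;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos    = &queueInfo;

    return gpu.createDeviceUnique( createInfo );
}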
@@ -18356,14 +18464,14 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDisplayModeKHR( vk::DisplayKHR display, const vk::DisplayModeCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DisplayModeKHR* pMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DisplayModeKHR>::type createDisplayModeKHR( vk::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, vk::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18372,7 +18480,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, vk::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18381,39 +18489,48 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayModeProperties2KHR( vk::DisplayKHR display, uint32_t* pPropertyCount, vk::DisplayModeProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ template<typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( vk::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( vk::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayModePropertiesKHR( vk::DisplayKHR display, uint32_t* pPropertyCount, vk::DisplayModePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( vk::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( vk::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlaneCapabilities2KHR( const vk::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, vk::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlaneCapabilitiesKHR( vk::DisplayModeKHR mode, uint32_t planeIndex, vk::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( vk::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, vk::DisplayKHR* pDisplays, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
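
A sketch of the count/fill idiom behind the new VK_KHR_performance_query enumeration added in this hunk, using the pointer overload. It assumes the dispatcher has loaded the extension's entry points and that the queue family supports performance queries:

#include <vector>
#include <vulkan/vulkan.hpp>

// Enumerate the performance counters exposed by one queue family (two-call idiom).
void listPerformanceCounters( vk::PhysicalDevice gpu, uint32_t queueFamilyIndex )
{
    uint32_t count = 0;
    // First call with null arrays: query how many counters are available.
    vk::Result result = gpu.enumerateQueueFamilyPerformanceQueryCountersKHR(
        queueFamilyIndex, &count, nullptr, nullptr );
    if ( result != vk::Result::eSuccess || count == 0 )
        return;

    std::vector<vk::PerformanceCounterKHR>            counters( count );
    std::vector<vk::PerformanceCounterDescriptionKHR> descriptions( count );
    // Second call: fill both arrays in one pass.
    result = gpu.enumerateQueueFamilyPerformanceQueryCountersKHR(
        queueFamilyIndex, &count, counters.data(), descriptions.data() );
    static_cast<void>( result );
}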
@@ -18422,7 +18539,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, vk::TimeDomainEXT* pTimeDomains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18431,7 +18548,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, vk::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18440,7 +18557,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, vk::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18449,7 +18566,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, vk::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18458,7 +18575,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayProperties2KHR( uint32_t* pPropertyCount, vk::DisplayProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18467,7 +18584,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, vk::DisplayPropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18476,204 +18593,211 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalBufferProperties( const vk::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, vk::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalBufferPropertiesKHR( const vk::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, vk::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalFenceProperties( const vk::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, vk::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalFencePropertiesKHR( const vk::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, vk::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getExternalImageFormatPropertiesNV( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ExternalMemoryHandleTypeFlagsNV externalHandleType, vk::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalSemaphoreProperties( const vk::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, vk::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getExternalSemaphorePropertiesKHR( const vk::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, vk::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures( vk::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceFeatures getFeatures(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures2( vk::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFeatures2KHR( vk::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFormatProperties( vk::Format format, vk::FormatProperties* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::FormatProperties getFormatProperties( vk::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFormatProperties2( vk::Format format, vk::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::FormatProperties2 getFormatProperties2( vk::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getFormatProperties2( vk::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getFormatProperties2KHR( vk::Format format, vk::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::FormatProperties2 getFormatProperties2KHR( vk::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- StructureChain<X, Y, Z...> getFormatProperties2KHR( vk::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getGeneratedCommandsPropertiesNVX( vk::DeviceGeneratedCommandsFeaturesNVX* pFeatures, vk::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getImageFormatProperties( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ImageFormatProperties>::type getImageFormatProperties( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getImageFormatProperties2( const vk::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, vk::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getImageFormatProperties2KHR( const vk::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, vk::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryProperties( vk::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryProperties2( vk::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMemoryProperties2KHR( vk::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getMultisamplePropertiesEXT( vk::SampleCountFlagBits samples, vk::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::MultisamplePropertiesEXT getMultisamplePropertiesEXT( vk::SampleCountFlagBits samples, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getPresentRectanglesKHR( vk::SurfaceKHR surface, uint32_t* pRectCount, vk::Rect2D* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( vk::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getProperties( vk::PhysicalDeviceProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceProperties getProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getProperties2( vk::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getProperties2KHR( vk::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- vk::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18682,7 +18806,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18695,7 +18819,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18708,16 +18832,16 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, uint32_t* pPropertyCount, vk::SparseImageFormatProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties2( const vk::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, vk::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18726,7 +18850,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void getSparseImageFormatProperties2KHR( const vk::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, vk::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18735,7 +18859,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, vk::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18744,30 +18868,30 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceCapabilities2EXT( vk::SurfaceKHR surface, vk::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( vk::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceCapabilities2KHR( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, vk::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceCapabilitiesKHR( vk::SurfaceKHR surface, vk::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( vk::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceFormats2KHR( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, vk::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18776,17 +18900,17 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceFormatsKHR( vk::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, vk::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( vk::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfacePresentModes2EXT( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, vk::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18796,19 +18920,28 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfacePresentModesKHR( vk::SurfaceKHR surface, uint32_t* pPresentModeCount, vk::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( vk::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
template<typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+ typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, vk::SurfaceKHR surface, vk::Bool32* pSupported, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, vk::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ template<typename Allocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+ typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
@@ -18845,19 +18978,19 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, vk::DisplayKHR* pDisplay, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result releaseDisplayEXT( vk::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#else
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<void>::type releaseDisplayEXT( vk::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<void>::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
@@ -18947,10 +19080,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createAndroidSurfaceKHR( const vk::AndroidSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18959,10 +19092,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDebugReportCallbackEXT( const vk::DebugReportCallbackCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DebugReportCallbackEXT* pCallback, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18970,10 +19103,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDebugUtilsMessengerEXT( const vk::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18981,10 +19114,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createDisplayPlaneSurfaceKHR( const vk::DisplaySurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -18992,10 +19125,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createHeadlessSurfaceEXT( const vk::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19004,10 +19137,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_IOS_MVK
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createIOSSurfaceMVK( const vk::IOSSurfaceCreateInfoMVK* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19017,10 +19150,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_FUCHSIA
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createImagePipeSurfaceFUCHSIA( const vk::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19030,10 +19163,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_MACOS_MVK
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createMacOSSurfaceMVK( const vk::MacOSSurfaceCreateInfoMVK* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19043,10 +19176,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_METAL_EXT
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createMetalSurfaceEXT( const vk::MetalSurfaceCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19056,10 +19189,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_GGP
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createStreamDescriptorSurfaceGGP( const vk::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19069,10 +19202,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_VI_NN
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createViSurfaceNN( const vk::ViSurfaceCreateInfoNN* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19082,10 +19215,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createWaylandSurfaceKHR( const vk::WaylandSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19095,10 +19228,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createWin32SurfaceKHR( const vk::Win32SurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19108,10 +19241,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XCB_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createXcbSurfaceKHR( const vk::XcbSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19121,10 +19254,10 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_KHR
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createXlibSurfaceKHR( const vk::XlibSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
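
Every platform-specific creator in this run is guarded by its VK_USE_PLATFORM_* macro and shares the same pair of overloads; the headless variant is the only one usable without a window system. A sketch of both forms, assuming an 'instance' created with VK_EXT_headless_surface enabled:

    vk::HeadlessSurfaceCreateInfoEXT surfaceInfo;   // only sType/pNext/flags; defaults suffice

    // Enhanced mode: unique handle, throws on failure.
    vk::UniqueSurfaceKHR surface = instance.createHeadlessSurfaceEXTUnique( surfaceInfo );

    // Pointer form, declared VULKAN_HPP_NOEXCEPT after this change:
    vk::SurfaceKHR raw;
    vk::Result result = instance.createHeadlessSurfaceEXT( &surfaceInfo, nullptr, &raw );
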
@@ -19133,63 +19266,63 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugReportMessageEXT( vk::DebugReportFlagsEXT flags, vk::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void debugReportMessageEXT( vk::DebugReportFlagsEXT flags, vk::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugReportCallbackEXT( vk::DebugReportCallbackEXT callback, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugReportCallbackEXT( vk::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DebugReportCallbackEXT callback, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugUtilsMessengerEXT( vk::DebugUtilsMessengerEXT messenger, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroyDebugUtilsMessengerEXT( vk::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DebugUtilsMessengerEXT messenger, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySurfaceKHR( vk::SurfaceKHR surface, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroySurfaceKHR( vk::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::SurfaceKHR surface, const vk::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void destroy( vk::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, vk::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19198,7 +19331,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, vk::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
@@ -19207,7 +19340,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, vk::PhysicalDevice* pPhysicalDevices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+ Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
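
The pointer overload keeps the classic Vulkan two-call idiom (query the count, then fill the array), which is why it can now be declared VULKAN_HPP_NOEXCEPT, while the enhanced-mode overload performs both calls, allocates the vector, and throws on failure. A sketch of both, again assuming an existing 'instance':

    #include <vector>

    uint32_t count = 0;
    vk::Result r = instance.enumeratePhysicalDevices( &count, nullptr );     // first call: count only
    std::vector<vk::PhysicalDevice> gpus( count );
    r = instance.enumeratePhysicalDevices( &count, gpus.data() );            // second call: fill

    // Enhanced mode collapses the idiom into one expression:
    std::vector<vk::PhysicalDevice> gpus2 = instance.enumeratePhysicalDevices();
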
@@ -19223,10 +19356,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, vk::DebugUtilsMessageTypeFlagsEXT messageTypes, const vk::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- void submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, vk::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+ void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
@@ -19261,10 +19394,10 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result createInstance( const vk::InstanceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Instance* pInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+ Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<vk::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+ typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
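
createInstance is the one creator here that is a free function rather than an Instance member, since it bootstraps the instance itself. A minimal sketch of the unique-handle form; the application/engine names and versions are placeholders:

    vk::ApplicationInfo appInfo( "example-app", 1, "no-engine", 1, VK_API_VERSION_1_1 );
    vk::InstanceCreateInfo createInfo( {}, &appInfo );
    vk::UniqueInstance instance =
        vk::createInstanceUnique( createInfo );   // throws vk::SystemError on failure
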
@@ -19272,7 +19405,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, vk::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+ Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
@@ -19281,7 +19414,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, vk::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+ Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
@@ -19290,93 +19423,52 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+ Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
- ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ typename ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ struct GeometryTrianglesNV
+ {
+ VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
+ uint32_t vertexCount_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
+ VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Buffer indexData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {},
+ uint32_t indexCount_ = {},
+ VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+ VULKAN_HPP_NAMESPACE::Buffer transformData_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : vertexData( vertexData_ )
+ , vertexOffset( vertexOffset_ )
+ , vertexCount( vertexCount_ )
+ , vertexStride( vertexStride_ )
+ , vertexFormat( vertexFormat_ )
+ , indexData( indexData_ )
+ , indexOffset( indexOffset_ )
+ , indexCount( indexCount_ )
+ , indexType( indexType_ )
+ , transformData( transformData_ )
+ , transformOffset( transformOffset_ )
+ {}
- namespace layout
- {
- struct GeometryTrianglesNV
+ VULKAN_HPP_NAMESPACE::GeometryTrianglesNV & operator=( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( vk::Buffer vertexData_ = vk::Buffer(),
- vk::DeviceSize vertexOffset_ = 0,
- uint32_t vertexCount_ = 0,
- vk::DeviceSize vertexStride_ = 0,
- vk::Format vertexFormat_ = vk::Format::eUndefined,
- vk::Buffer indexData_ = vk::Buffer(),
- vk::DeviceSize indexOffset_ = 0,
- uint32_t indexCount_ = 0,
- vk::IndexType indexType_ = vk::IndexType::eUint16,
- vk::Buffer transformData_ = vk::Buffer(),
- vk::DeviceSize transformOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : vertexData( vertexData_ )
- , vertexOffset( vertexOffset_ )
- , vertexCount( vertexCount_ )
- , vertexStride( vertexStride_ )
- , vertexFormat( vertexFormat_ )
- , indexData( indexData_ )
- , indexOffset( indexOffset_ )
- , indexCount( indexCount_ )
- , indexType( indexType_ )
- , transformData( transformData_ )
- , transformOffset( transformOffset_ )
- {}
-
- GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkGeometryTrianglesNV*>(this) = rhs;
- }
-
- GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkGeometryTrianglesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eGeometryTrianglesNV;
- const void* pNext = nullptr;
- vk::Buffer vertexData;
- vk::DeviceSize vertexOffset;
- uint32_t vertexCount;
- vk::DeviceSize vertexStride;
- vk::Format vertexFormat;
- vk::Buffer indexData;
- vk::DeviceSize indexOffset;
- uint32_t indexCount;
- vk::IndexType indexType;
- vk::Buffer transformData;
- vk::DeviceSize transformOffset;
- };
- static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct GeometryTrianglesNV : public layout::GeometryTrianglesNV
- {
- VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( vk::Buffer vertexData_ = vk::Buffer(),
- vk::DeviceSize vertexOffset_ = 0,
- uint32_t vertexCount_ = 0,
- vk::DeviceSize vertexStride_ = 0,
- vk::Format vertexFormat_ = vk::Format::eUndefined,
- vk::Buffer indexData_ = vk::Buffer(),
- vk::DeviceSize indexOffset_ = 0,
- uint32_t indexCount_ = 0,
- vk::IndexType indexType_ = vk::IndexType::eUint16,
- vk::Buffer transformData_ = vk::Buffer(),
- vk::DeviceSize transformOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::GeometryTrianglesNV( vertexData_, vertexOffset_, vertexCount_, vertexStride_, vertexFormat_, indexData_, indexOffset_, indexCount_, indexType_, transformData_, transformOffset_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) - offsetof( GeometryTrianglesNV, pNext ) );
+ return *this;
+ }
GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::GeometryTrianglesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::GeometryTrianglesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>(&rhs);
return *this;
}
@@ -19386,13 +19478,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GeometryTrianglesNV & setVertexData( vk::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
{
vertexData = vertexData_;
return *this;
}
- GeometryTrianglesNV & setVertexOffset( vk::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
{
vertexOffset = vertexOffset_;
return *this;
@@ -19404,25 +19496,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GeometryTrianglesNV & setVertexStride( vk::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
{
vertexStride = vertexStride_;
return *this;
}
- GeometryTrianglesNV & setVertexFormat( vk::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
{
vertexFormat = vertexFormat_;
return *this;
}
- GeometryTrianglesNV & setIndexData( vk::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
{
indexData = indexData_;
return *this;
}
- GeometryTrianglesNV & setIndexOffset( vk::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
{
indexOffset = indexOffset_;
return *this;
@@ -19434,19 +19526,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GeometryTrianglesNV & setIndexType( vk::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
- GeometryTrianglesNV & setTransformData( vk::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
{
transformData = transformData_;
return *this;
}
- GeometryTrianglesNV & setTransformOffset( vk::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
+ GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
{
transformOffset = transformOffset_;
return *this;
@@ -19484,65 +19576,50 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::GeometryTrianglesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
+ uint32_t vertexCount = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
+ VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Buffer indexData = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
+ uint32_t indexCount = {};
+ VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+ VULKAN_HPP_NAMESPACE::Buffer transformData = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
};
static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
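
The rewritten struct above shows the pattern this diff applies to every wrapper: the protected layout:: base is folded into a single standard-layout struct, sType becomes const (the memcpy in operator= deliberately starts at pNext so the const member is never overwritten), and the fluent setters are preserved. A sketch of filling it through those setters, where vertexBuffer, indexBuffer, and the two counts are assumed to already exist:

    // Sketch; vertexBuffer/indexBuffer are assumed, previously created vk::Buffer handles.
    vk::GeometryTrianglesNV triangles = vk::GeometryTrianglesNV()
        .setVertexData( vertexBuffer )
        .setVertexCount( vertexCount )
        .setVertexStride( 3 * sizeof( float ) )            // tightly packed vec3 positions
        .setVertexFormat( vk::Format::eR32G32B32Sfloat )
        .setIndexData( indexBuffer )
        .setIndexCount( indexCount )
        .setIndexType( vk::IndexType::eUint32 );
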
- namespace layout
- {
- struct GeometryAABBNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR GeometryAABBNV( vk::Buffer aabbData_ = vk::Buffer(),
- uint32_t numAABBs_ = 0,
- uint32_t stride_ = 0,
- vk::DeviceSize offset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : aabbData( aabbData_ )
- , numAABBs( numAABBs_ )
- , stride( stride_ )
- , offset( offset_ )
- {}
-
- GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkGeometryAABBNV*>(this) = rhs;
- }
-
- GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkGeometryAABBNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eGeometryAabbNV;
- const void* pNext = nullptr;
- vk::Buffer aabbData;
- uint32_t numAABBs;
- uint32_t stride;
- vk::DeviceSize offset;
- };
- static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "layout struct and wrapper have different size!" );
- }
-
- struct GeometryAABBNV : public layout::GeometryAABBNV
+ struct GeometryAABBNV
{
- VULKAN_HPP_CONSTEXPR GeometryAABBNV( vk::Buffer aabbData_ = vk::Buffer(),
- uint32_t numAABBs_ = 0,
- uint32_t stride_ = 0,
- vk::DeviceSize offset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::GeometryAABBNV( aabbData_, numAABBs_, stride_, offset_ )
+ VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {},
+ uint32_t numAABBs_ = {},
+ uint32_t stride_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : aabbData( aabbData_ )
+ , numAABBs( numAABBs_ )
+ , stride( stride_ )
+ , offset( offset_ )
{}
+ VULKAN_HPP_NAMESPACE::GeometryAABBNV & operator=( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) - offsetof( GeometryAABBNV, pNext ) );
+ return *this;
+ }
+
GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::GeometryAABBNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::GeometryAABBNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>(&rhs);
return *this;
}
@@ -19552,7 +19629,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GeometryAABBNV & setAabbData( vk::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
{
aabbData = aabbData_;
return *this;
@@ -19570,7 +19647,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GeometryAABBNV & setOffset( vk::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
@@ -19601,38 +19678,43 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::GeometryAABBNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
+ uint32_t numAABBs = {};
+ uint32_t stride = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
};
static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
struct GeometryDataNV
{
- VULKAN_HPP_CONSTEXPR GeometryDataNV( vk::GeometryTrianglesNV triangles_ = vk::GeometryTrianglesNV(),
- vk::GeometryAABBNV aabbs_ = vk::GeometryAABBNV() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {},
+ VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT
: triangles( triangles_ )
, aabbs( aabbs_ )
{}
GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkGeometryDataNV*>(this) = rhs;
+ *this = rhs;
}
GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkGeometryDataNV*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>(&rhs);
return *this;
}
- GeometryDataNV & setTriangles( vk::GeometryTrianglesNV triangles_ ) VULKAN_HPP_NOEXCEPT
+ GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ ) VULKAN_HPP_NOEXCEPT
{
triangles = triangles_;
return *this;
}
- GeometryDataNV & setAabbs( vk::GeometryAABBNV aabbs_ ) VULKAN_HPP_NOEXCEPT
+ GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ ) VULKAN_HPP_NOEXCEPT
{
aabbs = aabbs_;
return *this;
@@ -19660,61 +19742,36 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::GeometryTrianglesNV triangles;
- vk::GeometryAABBNV aabbs;
+ VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
+ VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
};
static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
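
GeometryDataNV carries both payloads, but only the member matching the enclosing geometry type is read. Continuing the sketch above:

    vk::GeometryDataNV data = vk::GeometryDataNV()
        .setTriangles( triangles );   // aabbs stays value-initialized and is ignored
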
- namespace layout
- {
- struct GeometryNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR GeometryNV( vk::GeometryTypeNV geometryType_ = vk::GeometryTypeNV::eTriangles,
- vk::GeometryDataNV geometry_ = vk::GeometryDataNV(),
- vk::GeometryFlagsNV flags_ = vk::GeometryFlagsNV() ) VULKAN_HPP_NOEXCEPT
- : geometryType( geometryType_ )
- , geometry( geometry_ )
- , flags( flags_ )
- {}
-
- GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkGeometryNV*>(this) = rhs;
- }
-
- GeometryNV& operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkGeometryNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eGeometryNV;
- const void* pNext = nullptr;
- vk::GeometryTypeNV geometryType;
- vk::GeometryDataNV geometry;
- vk::GeometryFlagsNV flags;
- };
- static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "layout struct and wrapper have different size!" );
- }
-
- struct GeometryNV : public layout::GeometryNV
+ struct GeometryNV
{
- VULKAN_HPP_CONSTEXPR GeometryNV( vk::GeometryTypeNV geometryType_ = vk::GeometryTypeNV::eTriangles,
- vk::GeometryDataNV geometry_ = vk::GeometryDataNV(),
- vk::GeometryFlagsNV flags_ = vk::GeometryFlagsNV() ) VULKAN_HPP_NOEXCEPT
- : layout::GeometryNV( geometryType_, geometry_, flags_ )
+ VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles,
+ VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {},
+ VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : geometryType( geometryType_ )
+ , geometry( geometry_ )
+ , flags( flags_ )
{}
+ VULKAN_HPP_NAMESPACE::GeometryNV & operator=( VULKAN_HPP_NAMESPACE::GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) - offsetof( GeometryNV, pNext ) );
+ return *this;
+ }
+
GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::GeometryNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
GeometryNV& operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::GeometryNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>(&rhs);
return *this;
}
@@ -19724,19 +19781,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GeometryNV & setGeometryType( vk::GeometryTypeNV geometryType_ ) VULKAN_HPP_NOEXCEPT
+ GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ ) VULKAN_HPP_NOEXCEPT
{
geometryType = geometryType_;
return *this;
}
- GeometryNV & setGeometry( vk::GeometryDataNV geometry_ ) VULKAN_HPP_NOEXCEPT
+ GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ ) VULKAN_HPP_NOEXCEPT
{
geometry = geometry_;
return *this;
}
- GeometryNV & setFlags( vk::GeometryFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -19766,69 +19823,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::GeometryNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles;
+ VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
+ VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags = {};
};
static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );
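
GeometryNV then tags the payload with its type and usage flags. Continuing the sketch:

    vk::GeometryNV geometry = vk::GeometryNV()
        .setGeometryType( vk::GeometryTypeNV::eTriangles )
        .setGeometry( data )
        .setFlags( vk::GeometryFlagBitsNV::eOpaque );   // no any-hit shader needed
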
- namespace layout
- {
- struct AccelerationStructureInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( vk::AccelerationStructureTypeNV type_ = vk::AccelerationStructureTypeNV::eTopLevel,
- vk::BuildAccelerationStructureFlagsNV flags_ = vk::BuildAccelerationStructureFlagsNV(),
- uint32_t instanceCount_ = 0,
- uint32_t geometryCount_ = 0,
- const vk::GeometryNV* pGeometries_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , flags( flags_ )
- , instanceCount( instanceCount_ )
- , geometryCount( geometryCount_ )
- , pGeometries( pGeometries_ )
- {}
-
- AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAccelerationStructureInfoNV*>(this) = rhs;
- }
-
- AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAccelerationStructureInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAccelerationStructureInfoNV;
- const void* pNext = nullptr;
- vk::AccelerationStructureTypeNV type;
- vk::BuildAccelerationStructureFlagsNV flags;
- uint32_t instanceCount;
- uint32_t geometryCount;
- const vk::GeometryNV* pGeometries;
- };
- static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct AccelerationStructureInfoNV : public layout::AccelerationStructureInfoNV
+ struct AccelerationStructureInfoNV
{
- VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( vk::AccelerationStructureTypeNV type_ = vk::AccelerationStructureTypeNV::eTopLevel,
- vk::BuildAccelerationStructureFlagsNV flags_ = vk::BuildAccelerationStructureFlagsNV(),
- uint32_t instanceCount_ = 0,
- uint32_t geometryCount_ = 0,
- const vk::GeometryNV* pGeometries_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::AccelerationStructureInfoNV( type_, flags_, instanceCount_, geometryCount_, pGeometries_ )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel,
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {},
+ uint32_t instanceCount_ = {},
+ uint32_t geometryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT
+ : type( type_ )
+ , flags( flags_ )
+ , instanceCount( instanceCount_ )
+ , geometryCount( geometryCount_ )
+ , pGeometries( pGeometries_ )
{}
+ VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) - offsetof( AccelerationStructureInfoNV, pNext ) );
+ return *this;
+ }
+
AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AccelerationStructureInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AccelerationStructureInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>(&rhs);
return *this;
}
@@ -19838,13 +19870,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AccelerationStructureInfoNV & setType( vk::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- AccelerationStructureInfoNV & setFlags( vk::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -19862,7 +19894,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AccelerationStructureInfoNV & setPGeometries( const vk::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
{
pGeometries = pGeometries_;
return *this;
@@ -19894,57 +19926,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::AccelerationStructureInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel;
+ VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
+ uint32_t instanceCount = {};
+ uint32_t geometryCount = {};
+ const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {};
};
static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
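
AccelerationStructureInfoNV describes either a top-level structure (instanceCount) or a bottom-level one (geometries); the two are mutually exclusive. A bottom-level sketch, continuing from the geometry above (setGeometryCount is one of the unchanged setters elided from this hunk):

    vk::AccelerationStructureInfoNV blasInfo = vk::AccelerationStructureInfoNV()
        .setType( vk::AccelerationStructureTypeNV::eBottomLevel )
        .setFlags( vk::BuildAccelerationStructureFlagBitsNV::ePreferFastTrace )
        .setGeometryCount( 1 )
        .setPGeometries( &geometry );
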
- namespace layout
+ struct AccelerationStructureCreateInfoNV
{
- struct AccelerationStructureCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( vk::DeviceSize compactedSize_ = 0,
- vk::AccelerationStructureInfoNV info_ = vk::AccelerationStructureInfoNV() ) VULKAN_HPP_NOEXCEPT
- : compactedSize( compactedSize_ )
- , info( info_ )
- {}
-
- AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>(this) = rhs;
- }
-
- AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
- const void* pNext = nullptr;
- vk::DeviceSize compactedSize;
- vk::AccelerationStructureInfoNV info;
- };
- static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct AccelerationStructureCreateInfoNV : public layout::AccelerationStructureCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( vk::DeviceSize compactedSize_ = 0,
- vk::AccelerationStructureInfoNV info_ = vk::AccelerationStructureInfoNV() ) VULKAN_HPP_NOEXCEPT
- : layout::AccelerationStructureCreateInfoNV( compactedSize_, info_ )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {},
+ VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT
+ : compactedSize( compactedSize_ )
+ , info( info_ )
{}
+ VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) - offsetof( AccelerationStructureCreateInfoNV, pNext ) );
+ return *this;
+ }
+
AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AccelerationStructureCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AccelerationStructureCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>(&rhs);
return *this;
}
@@ -19954,13 +19969,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AccelerationStructureCreateInfoNV & setCompactedSize( vk::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
{
compactedSize = compactedSize_;
return *this;
}
- AccelerationStructureCreateInfoNV & setInfo( vk::AccelerationStructureInfoNV info_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ ) VULKAN_HPP_NOEXCEPT
{
info = info_;
return *this;
@@ -19989,57 +20004,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::AccelerationStructureCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
};
static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
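+ // Usage sketch (illustrative, not part of the generated header): with the
+ // flattened wrappers, the create info can be built directly,
+ //   vk::AccelerationStructureInfoNV asInfo( vk::AccelerationStructureTypeNV::eBottomLevel );
+ //   vk::AccelerationStructureCreateInfoNV createInfo( 0 /*compactedSize*/, asInfo );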
- namespace layout
- {
- struct AccelerationStructureMemoryRequirementsInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( vk::AccelerationStructureMemoryRequirementsTypeNV type_ = vk::AccelerationStructureMemoryRequirementsTypeNV::eObject,
- vk::AccelerationStructureNV accelerationStructure_ = vk::AccelerationStructureNV() ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , accelerationStructure( accelerationStructure_ )
- {}
-
- AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>(this) = rhs;
- }
-
- AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
- const void* pNext = nullptr;
- vk::AccelerationStructureMemoryRequirementsTypeNV type;
- vk::AccelerationStructureNV accelerationStructure;
- };
- static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct AccelerationStructureMemoryRequirementsInfoNV : public layout::AccelerationStructureMemoryRequirementsInfoNV
+ struct AccelerationStructureMemoryRequirementsInfoNV
{
- VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( vk::AccelerationStructureMemoryRequirementsTypeNV type_ = vk::AccelerationStructureMemoryRequirementsTypeNV::eObject,
- vk::AccelerationStructureNV accelerationStructure_ = vk::AccelerationStructureNV() ) VULKAN_HPP_NOEXCEPT
- : layout::AccelerationStructureMemoryRequirementsInfoNV( type_, accelerationStructure_ )
+ VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject,
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
+ : type( type_ )
+ , accelerationStructure( accelerationStructure_ )
{}
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) - offsetof( AccelerationStructureMemoryRequirementsInfoNV, pNext ) );
+ return *this;
+ }
+
AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AccelerationStructureMemoryRequirementsInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AccelerationStructureMemoryRequirementsInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>(&rhs);
return *this;
}
@@ -20049,13 +20044,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AccelerationStructureMemoryRequirementsInfoNV & setType( vk::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( vk::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
@@ -20084,69 +20079,43 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::AccelerationStructureMemoryRequirementsInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
};
static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct AcquireNextImageInfoKHR
{
- struct AcquireNextImageInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR(),
- uint64_t timeout_ = 0,
- vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::Fence fence_ = vk::Fence(),
- uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
- , timeout( timeout_ )
- , semaphore( semaphore_ )
- , fence( fence_ )
- , deviceMask( deviceMask_ )
- {}
-
- AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAcquireNextImageInfoKHR*>(this) = rhs;
- }
-
- AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAcquireNextImageInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
- const void* pNext = nullptr;
- vk::SwapchainKHR swapchain;
- uint64_t timeout;
- vk::Semaphore semaphore;
- vk::Fence fence;
- uint32_t deviceMask;
- };
- static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct AcquireNextImageInfoKHR : public layout::AcquireNextImageInfoKHR
- {
- VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR(),
- uint64_t timeout_ = 0,
- vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::Fence fence_ = vk::Fence(),
- uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::AcquireNextImageInfoKHR( swapchain_, timeout_, semaphore_, fence_, deviceMask_ )
+ VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
+ uint64_t timeout_ = {},
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
+ : swapchain( swapchain_ )
+ , timeout( timeout_ )
+ , semaphore( semaphore_ )
+ , fence( fence_ )
+ , deviceMask( deviceMask_ )
{}
+ VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) - offsetof( AcquireNextImageInfoKHR, pNext ) );
+ return *this;
+ }
+
AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AcquireNextImageInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AcquireNextImageInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>(&rhs);
return *this;
}
@@ -20156,7 +20125,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AcquireNextImageInfoKHR & setSwapchain( vk::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+ AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
@@ -20168,13 +20137,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AcquireNextImageInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- AcquireNextImageInfoKHR & setFence( vk::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
@@ -20212,20 +20181,101 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::AcquireNextImageInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+ uint64_t timeout = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ uint32_t deviceMask = {};
};
static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
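+ // Usage sketch (illustrative; assumes existing swapchain and semaphore handles):
+ //   vk::AcquireNextImageInfoKHR acquireInfo( swapchain, UINT64_MAX, semaphore, {}, 1 /*deviceMask*/ );
+ //   uint32_t imageIndex = device.acquireNextImage2KHR( acquireInfo ).value;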
+ struct AcquireProfilingLockInfoKHR
+ {
+ VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {},
+ uint64_t timeout_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , timeout( timeout_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) - offsetof( AcquireProfilingLockInfoKHR, pNext ) );
+ return *this;
+ }
+
+ AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ AcquireProfilingLockInfoKHR& operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>(&rhs);
+ return *this;
+ }
+
+ AcquireProfilingLockInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ {
+ flags = flags_;
+ return *this;
+ }
+
+ AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ timeout = timeout_;
+ return *this;
+ }
+
+ operator VkAcquireProfilingLockInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
+ }
+
+ operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
+ }
+
+ bool operator==( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( timeout == rhs.timeout );
+ }
+
+ bool operator!=( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
+ uint64_t timeout = {};
+ };
+ static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
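+ // Usage sketch (illustrative; assumes a vk::Device created with
+ // VK_KHR_performance_query enabled):
+ //   vk::AcquireProfilingLockInfoKHR lockInfo{};
+ //   lockInfo.setTimeout( UINT64_MAX ); // block until the profiling lock is granted
+ //   device.acquireProfilingLockKHR( lockInfo );
+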
struct AllocationCallbacks
{
- VULKAN_HPP_CONSTEXPR AllocationCallbacks( void* pUserData_ = nullptr,
- PFN_vkAllocationFunction pfnAllocation_ = nullptr,
- PFN_vkReallocationFunction pfnReallocation_ = nullptr,
- PFN_vkFreeFunction pfnFree_ = nullptr,
- PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr,
- PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AllocationCallbacks( void* pUserData_ = {},
+ PFN_vkAllocationFunction pfnAllocation_ = {},
+ PFN_vkReallocationFunction pfnReallocation_ = {},
+ PFN_vkFreeFunction pfnFree_ = {},
+ PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
+ PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
: pUserData( pUserData_ )
, pfnAllocation( pfnAllocation_ )
, pfnReallocation( pfnReallocation_ )
@@ -20236,12 +20286,12 @@ namespace VULKAN_HPP_NAMESPACE
AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkAllocationCallbacks*>(this) = rhs;
+ *this = rhs;
}
AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkAllocationCallbacks*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>(&rhs);
return *this;
}
@@ -20307,22 +20357,22 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- void* pUserData;
- PFN_vkAllocationFunction pfnAllocation;
- PFN_vkReallocationFunction pfnReallocation;
- PFN_vkFreeFunction pfnFree;
- PFN_vkInternalAllocationNotification pfnInternalAllocation;
- PFN_vkInternalFreeNotification pfnInternalFree;
+ void* pUserData = {};
+ PFN_vkAllocationFunction pfnAllocation = {};
+ PFN_vkReallocationFunction pfnReallocation = {};
+ PFN_vkFreeFunction pfnFree = {};
+ PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
+ PFN_vkInternalFreeNotification pfnInternalFree = {};
};
static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
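+ // Note: the function-pointer members now default to value-initialization ( = {} ),
+ // i.e. nullptr, matching the previous explicit defaults; behavior is unchanged.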
struct ComponentMapping
{
- VULKAN_HPP_CONSTEXPR ComponentMapping( vk::ComponentSwizzle r_ = vk::ComponentSwizzle::eIdentity,
- vk::ComponentSwizzle g_ = vk::ComponentSwizzle::eIdentity,
- vk::ComponentSwizzle b_ = vk::ComponentSwizzle::eIdentity,
- vk::ComponentSwizzle a_ = vk::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT
: r( r_ )
, g( g_ )
, b( b_ )
@@ -20331,34 +20381,34 @@ namespace VULKAN_HPP_NAMESPACE
ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkComponentMapping*>(this) = rhs;
+ *this = rhs;
}
ComponentMapping& operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkComponentMapping*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>(&rhs);
return *this;
}
- ComponentMapping & setR( vk::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
+ ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
{
r = r_;
return *this;
}
- ComponentMapping & setG( vk::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
+ ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
{
g = g_;
return *this;
}
- ComponentMapping & setB( vk::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
+ ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
{
b = b_;
return *this;
}
- ComponentMapping & setA( vk::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
+ ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
{
a = a_;
return *this;
@@ -20388,63 +20438,50 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ComponentSwizzle r;
- vk::ComponentSwizzle g;
- vk::ComponentSwizzle b;
- vk::ComponentSwizzle a;
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+ VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
};
static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ComponentMapping>::value, "struct wrapper is not a standard layout!" );
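+ // Usage sketch (illustrative): all swizzles default to eIdentity, so an identity
+ // mapping is simply
+ //   vk::ComponentMapping mapping{};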
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- namespace layout
+ struct AndroidHardwareBufferFormatPropertiesANDROID
{
- struct AndroidHardwareBufferFormatPropertiesANDROID
- {
- protected:
- AndroidHardwareBufferFormatPropertiesANDROID() VULKAN_HPP_NOEXCEPT
- {}
-
- AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>(this) = rhs;
- }
-
- AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
- void* pNext = nullptr;
- vk::Format format;
- uint64_t externalFormat;
- vk::FormatFeatureFlags formatFeatures;
- vk::ComponentMapping samplerYcbcrConversionComponents;
- vk::SamplerYcbcrModelConversion suggestedYcbcrModel;
- vk::SamplerYcbcrRange suggestedYcbcrRange;
- vk::ChromaLocation suggestedXChromaOffset;
- vk::ChromaLocation suggestedYChromaOffset;
- };
- static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "layout struct and wrapper have different size!" );
- }
-
- struct AndroidHardwareBufferFormatPropertiesANDROID : public layout::AndroidHardwareBufferFormatPropertiesANDROID
- {
- AndroidHardwareBufferFormatPropertiesANDROID() VULKAN_HPP_NOEXCEPT
- : layout::AndroidHardwareBufferFormatPropertiesANDROID()
+ AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ uint64_t externalFormat_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT
+ : format( format_ )
+ , externalFormat( externalFormat_ )
+ , formatFeatures( formatFeatures_ )
+ , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
+ , suggestedYcbcrModel( suggestedYcbcrModel_ )
+ , suggestedYcbcrRange( suggestedYcbcrRange_ )
+ , suggestedXChromaOffset( suggestedXChromaOffset_ )
+ , suggestedYChromaOffset( suggestedYChromaOffset_ )
{}
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) - offsetof( AndroidHardwareBufferFormatPropertiesANDROID, pNext ) );
+ return *this;
+ }
+
AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AndroidHardwareBufferFormatPropertiesANDROID( rhs )
- {}
+ {
+ *this = rhs;
+ }
AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AndroidHardwareBufferFormatPropertiesANDROID::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>(&rhs);
return *this;
}
@@ -20477,8 +20514,17 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::AndroidHardwareBufferFormatPropertiesANDROID::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ uint64_t externalFormat = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
+ VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
};
static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
@@ -20486,47 +20532,28 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- namespace layout
- {
- struct AndroidHardwareBufferPropertiesANDROID
- {
- protected:
- AndroidHardwareBufferPropertiesANDROID() VULKAN_HPP_NOEXCEPT
- {}
-
- AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>(this) = rhs;
- }
-
- AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
- void* pNext = nullptr;
- vk::DeviceSize allocationSize;
- uint32_t memoryTypeBits;
- };
- static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "layout struct and wrapper have different size!" );
- }
-
- struct AndroidHardwareBufferPropertiesANDROID : public layout::AndroidHardwareBufferPropertiesANDROID
+ struct AndroidHardwareBufferPropertiesANDROID
{
- AndroidHardwareBufferPropertiesANDROID() VULKAN_HPP_NOEXCEPT
- : layout::AndroidHardwareBufferPropertiesANDROID()
+ AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
+ uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+ : allocationSize( allocationSize_ )
+ , memoryTypeBits( memoryTypeBits_ )
{}
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) - offsetof( AndroidHardwareBufferPropertiesANDROID, pNext ) );
+ return *this;
+ }
+
AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AndroidHardwareBufferPropertiesANDROID( rhs )
- {}
+ {
+ *this = rhs;
+ }
AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AndroidHardwareBufferPropertiesANDROID::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>(&rhs);
return *this;
}
@@ -20553,8 +20580,11 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::AndroidHardwareBufferPropertiesANDROID::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
+ uint32_t memoryTypeBits = {};
};
static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
@@ -20562,46 +20592,26 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- namespace layout
- {
- struct AndroidHardwareBufferUsageANDROID
- {
- protected:
- AndroidHardwareBufferUsageANDROID() VULKAN_HPP_NOEXCEPT
- {}
-
- AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>(this) = rhs;
- }
-
- AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
- void* pNext = nullptr;
- uint64_t androidHardwareBufferUsage;
- };
- static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "layout struct and wrapper have different size!" );
- }
-
- struct AndroidHardwareBufferUsageANDROID : public layout::AndroidHardwareBufferUsageANDROID
+ struct AndroidHardwareBufferUsageANDROID
{
- AndroidHardwareBufferUsageANDROID() VULKAN_HPP_NOEXCEPT
- : layout::AndroidHardwareBufferUsageANDROID()
+ AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT
+ : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
{}
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID & operator=( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) - offsetof( AndroidHardwareBufferUsageANDROID, pNext ) );
+ return *this;
+ }
+
AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AndroidHardwareBufferUsageANDROID( rhs )
- {}
+ {
+ *this = rhs;
+ }
AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AndroidHardwareBufferUsageANDROID::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>(&rhs);
return *this;
}
@@ -20627,8 +20637,10 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::AndroidHardwareBufferUsageANDROID::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
+ void* pNext = {};
+ uint64_t androidHardwareBufferUsage = {};
};
static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
@@ -20636,51 +20648,28 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- namespace layout
+ struct AndroidSurfaceCreateInfoKHR
{
- struct AndroidSurfaceCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( vk::AndroidSurfaceCreateFlagsKHR flags_ = vk::AndroidSurfaceCreateFlagsKHR(),
- struct ANativeWindow* window_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , window( window_ )
- {}
-
- AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>(this) = rhs;
- }
-
- AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
- const void* pNext = nullptr;
- vk::AndroidSurfaceCreateFlagsKHR flags;
- struct ANativeWindow* window;
- };
- static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct AndroidSurfaceCreateInfoKHR : public layout::AndroidSurfaceCreateInfoKHR
- {
- VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( vk::AndroidSurfaceCreateFlagsKHR flags_ = vk::AndroidSurfaceCreateFlagsKHR(),
- struct ANativeWindow* window_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::AndroidSurfaceCreateInfoKHR( flags_, window_ )
+ VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {},
+ struct ANativeWindow* window_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , window( window_ )
{}
+ VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) - offsetof( AndroidSurfaceCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AndroidSurfaceCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AndroidSurfaceCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -20690,7 +20679,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AndroidSurfaceCreateInfoKHR & setFlags( vk::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -20725,70 +20714,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::AndroidSurfaceCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
+ struct ANativeWindow* window = {};
};
static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- namespace layout
+ struct ApplicationInfo
{
- struct ApplicationInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = nullptr,
- uint32_t applicationVersion_ = 0,
- const char* pEngineName_ = nullptr,
- uint32_t engineVersion_ = 0,
- uint32_t apiVersion_ = 0 ) VULKAN_HPP_NOEXCEPT
- : pApplicationName( pApplicationName_ )
- , applicationVersion( applicationVersion_ )
- , pEngineName( pEngineName_ )
- , engineVersion( engineVersion_ )
- , apiVersion( apiVersion_ )
- {}
-
- ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkApplicationInfo*>(this) = rhs;
- }
-
- ApplicationInfo& operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkApplicationInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eApplicationInfo;
- const void* pNext = nullptr;
- const char* pApplicationName;
- uint32_t applicationVersion;
- const char* pEngineName;
- uint32_t engineVersion;
- uint32_t apiVersion;
- };
- static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ApplicationInfo : public layout::ApplicationInfo
- {
- VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = nullptr,
- uint32_t applicationVersion_ = 0,
- const char* pEngineName_ = nullptr,
- uint32_t engineVersion_ = 0,
- uint32_t apiVersion_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ApplicationInfo( pApplicationName_, applicationVersion_, pEngineName_, engineVersion_, apiVersion_ )
+ VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = {},
+ uint32_t applicationVersion_ = {},
+ const char* pEngineName_ = {},
+ uint32_t engineVersion_ = {},
+ uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pApplicationName( pApplicationName_ )
+ , applicationVersion( applicationVersion_ )
+ , pEngineName( pEngineName_ )
+ , engineVersion( engineVersion_ )
+ , apiVersion( apiVersion_ )
{}
+ VULKAN_HPP_NAMESPACE::ApplicationInfo & operator=( VULKAN_HPP_NAMESPACE::ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) - offsetof( ApplicationInfo, pNext ) );
+ return *this;
+ }
+
ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ApplicationInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ApplicationInfo& operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ApplicationInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>(&rhs);
return *this;
}
@@ -20854,23 +20817,29 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ApplicationInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
+ const void* pNext = {};
+ const char* pApplicationName = {};
+ uint32_t applicationVersion = {};
+ const char* pEngineName = {};
+ uint32_t engineVersion = {};
+ uint32_t apiVersion = {};
};
static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
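+ // Usage sketch (illustrative), using the fluent setters:
+ //   vk::ApplicationInfo appInfo = vk::ApplicationInfo()
+ //                                     .setPApplicationName( "example" )
+ //                                     .setApiVersion( VK_API_VERSION_1_1 );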
struct AttachmentDescription
{
- VULKAN_HPP_CONSTEXPR AttachmentDescription( vk::AttachmentDescriptionFlags flags_ = vk::AttachmentDescriptionFlags(),
- vk::Format format_ = vk::Format::eUndefined,
- vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1,
- vk::AttachmentLoadOp loadOp_ = vk::AttachmentLoadOp::eLoad,
- vk::AttachmentStoreOp storeOp_ = vk::AttachmentStoreOp::eStore,
- vk::AttachmentLoadOp stencilLoadOp_ = vk::AttachmentLoadOp::eLoad,
- vk::AttachmentStoreOp stencilStoreOp_ = vk::AttachmentStoreOp::eStore,
- vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined,
- vk::ImageLayout finalLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
: flags( flags_ )
, format( format_ )
, samples( samples_ )
@@ -20884,64 +20853,64 @@ namespace VULKAN_HPP_NAMESPACE
AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkAttachmentDescription*>(this) = rhs;
+ *this = rhs;
}
AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkAttachmentDescription*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>(&rhs);
return *this;
}
- AttachmentDescription & setFlags( vk::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- AttachmentDescription & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- AttachmentDescription & setSamples( vk::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
- AttachmentDescription & setLoadOp( vk::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
{
loadOp = loadOp_;
return *this;
}
- AttachmentDescription & setStoreOp( vk::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
{
storeOp = storeOp_;
return *this;
}
- AttachmentDescription & setStencilLoadOp( vk::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilLoadOp = stencilLoadOp_;
return *this;
}
- AttachmentDescription & setStencilStoreOp( vk::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilStoreOp = stencilStoreOp_;
return *this;
}
- AttachmentDescription & setInitialLayout( vk::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
}
- AttachmentDescription & setFinalLayout( vk::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
{
finalLayout = finalLayout_;
return *this;
@@ -20961,7 +20930,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( flags == rhs.flags )
&& ( format == rhs.format )
- && vk::operator==( samples, rhs.samples )
+ && ( samples == rhs.samples )
&& ( loadOp == rhs.loadOp )
&& ( storeOp == rhs.storeOp )
&& ( stencilLoadOp == rhs.stencilLoadOp )
@@ -20976,172 +20945,135 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::AttachmentDescriptionFlags flags;
- vk::Format format;
- vk::SampleCountFlagBits samples;
- vk::AttachmentLoadOp loadOp;
- vk::AttachmentStoreOp storeOp;
- vk::AttachmentLoadOp stencilLoadOp;
- vk::AttachmentStoreOp stencilStoreOp;
- vk::ImageLayout initialLayout;
- vk::ImageLayout finalLayout;
+ VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct AttachmentDescription2KHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR AttachmentDescription2KHR( vk::AttachmentDescriptionFlags flags_ = vk::AttachmentDescriptionFlags(),
- vk::Format format_ = vk::Format::eUndefined,
- vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1,
- vk::AttachmentLoadOp loadOp_ = vk::AttachmentLoadOp::eLoad,
- vk::AttachmentStoreOp storeOp_ = vk::AttachmentStoreOp::eStore,
- vk::AttachmentLoadOp stencilLoadOp_ = vk::AttachmentLoadOp::eLoad,
- vk::AttachmentStoreOp stencilStoreOp_ = vk::AttachmentStoreOp::eStore,
- vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined,
- vk::ImageLayout finalLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , format( format_ )
- , samples( samples_ )
- , loadOp( loadOp_ )
- , storeOp( storeOp_ )
- , stencilLoadOp( stencilLoadOp_ )
- , stencilStoreOp( stencilStoreOp_ )
- , initialLayout( initialLayout_ )
- , finalLayout( finalLayout_ )
- {}
-
- AttachmentDescription2KHR( VkAttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAttachmentDescription2KHR*>(this) = rhs;
- }
-
- AttachmentDescription2KHR& operator=( VkAttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAttachmentDescription2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAttachmentDescription2KHR;
- const void* pNext = nullptr;
- vk::AttachmentDescriptionFlags flags;
- vk::Format format;
- vk::SampleCountFlagBits samples;
- vk::AttachmentLoadOp loadOp;
- vk::AttachmentStoreOp storeOp;
- vk::AttachmentLoadOp stencilLoadOp;
- vk::AttachmentStoreOp stencilStoreOp;
- vk::ImageLayout initialLayout;
- vk::ImageLayout finalLayout;
- };
- static_assert( sizeof( AttachmentDescription2KHR ) == sizeof( VkAttachmentDescription2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct AttachmentDescription2KHR : public layout::AttachmentDescription2KHR
- {
- VULKAN_HPP_CONSTEXPR AttachmentDescription2KHR( vk::AttachmentDescriptionFlags flags_ = vk::AttachmentDescriptionFlags(),
- vk::Format format_ = vk::Format::eUndefined,
- vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1,
- vk::AttachmentLoadOp loadOp_ = vk::AttachmentLoadOp::eLoad,
- vk::AttachmentStoreOp storeOp_ = vk::AttachmentStoreOp::eStore,
- vk::AttachmentLoadOp stencilLoadOp_ = vk::AttachmentLoadOp::eLoad,
- vk::AttachmentStoreOp stencilStoreOp_ = vk::AttachmentStoreOp::eStore,
- vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined,
- vk::ImageLayout finalLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : layout::AttachmentDescription2KHR( flags_, format_, samples_, loadOp_, storeOp_, stencilLoadOp_, stencilStoreOp_, initialLayout_, finalLayout_ )
+ struct AttachmentDescription2
+ {
+ VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , format( format_ )
+ , samples( samples_ )
+ , loadOp( loadOp_ )
+ , storeOp( storeOp_ )
+ , stencilLoadOp( stencilLoadOp_ )
+ , stencilStoreOp( stencilStoreOp_ )
+ , initialLayout( initialLayout_ )
+ , finalLayout( finalLayout_ )
{}
- AttachmentDescription2KHR( VkAttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AttachmentDescription2KHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::AttachmentDescription2 & operator=( VULKAN_HPP_NAMESPACE::AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) - offsetof( AttachmentDescription2, pNext ) );
+ return *this;
+ }
+
+ AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- AttachmentDescription2KHR& operator=( VkAttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2& operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AttachmentDescription2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>(&rhs);
return *this;
}
- AttachmentDescription2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- AttachmentDescription2KHR & setFlags( vk::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- AttachmentDescription2KHR & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- AttachmentDescription2KHR & setSamples( vk::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
- AttachmentDescription2KHR & setLoadOp( vk::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
{
loadOp = loadOp_;
return *this;
}
- AttachmentDescription2KHR & setStoreOp( vk::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
{
storeOp = storeOp_;
return *this;
}
- AttachmentDescription2KHR & setStencilLoadOp( vk::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilLoadOp = stencilLoadOp_;
return *this;
}
- AttachmentDescription2KHR & setStencilStoreOp( vk::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
{
stencilStoreOp = stencilStoreOp_;
return *this;
}
- AttachmentDescription2KHR & setInitialLayout( vk::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
}
- AttachmentDescription2KHR & setFinalLayout( vk::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
{
finalLayout = finalLayout_;
return *this;
}
- operator VkAttachmentDescription2KHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescription2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkAttachmentDescription2KHR*>( this );
+ return *reinterpret_cast<const VkAttachmentDescription2*>( this );
}
- operator VkAttachmentDescription2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkAttachmentDescription2KHR*>( this );
+ return *reinterpret_cast<VkAttachmentDescription2*>( this );
}
- bool operator==( AttachmentDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( format == rhs.format )
- && vk::operator==( samples, rhs.samples )
+ && ( samples == rhs.samples )
&& ( loadOp == rhs.loadOp )
&& ( storeOp == rhs.storeOp )
&& ( stencilLoadOp == rhs.stencilLoadOp )
@@ -21150,94 +21082,81 @@ namespace VULKAN_HPP_NAMESPACE
&& ( finalLayout == rhs.finalLayout );
}
- bool operator!=( AttachmentDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::AttachmentDescription2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+ VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+ VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- static_assert( sizeof( AttachmentDescription2KHR ) == sizeof( VkAttachmentDescription2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<AttachmentDescription2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
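+ // Usage sketch (illustrative): the KHR-suffixed name was promoted to core, so
+ //   vk::AttachmentDescription2 colorAttachment{};
+ //   colorAttachment.setFormat( vk::Format::eB8G8R8A8Unorm )
+ //                  .setLoadOp( vk::AttachmentLoadOp::eClear )
+ //                  .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );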
- namespace layout
+ struct AttachmentDescriptionStencilLayout
{
- struct AttachmentDescriptionStencilLayoutKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayoutKHR( vk::ImageLayout stencilInitialLayout_ = vk::ImageLayout::eUndefined,
- vk::ImageLayout stencilFinalLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : stencilInitialLayout( stencilInitialLayout_ )
- , stencilFinalLayout( stencilFinalLayout_ )
- {}
-
- AttachmentDescriptionStencilLayoutKHR( VkAttachmentDescriptionStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAttachmentDescriptionStencilLayoutKHR*>(this) = rhs;
- }
-
- AttachmentDescriptionStencilLayoutKHR& operator=( VkAttachmentDescriptionStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAttachmentDescriptionStencilLayoutKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAttachmentDescriptionStencilLayoutKHR;
- void* pNext = nullptr;
- vk::ImageLayout stencilInitialLayout;
- vk::ImageLayout stencilFinalLayout;
- };
- static_assert( sizeof( AttachmentDescriptionStencilLayoutKHR ) == sizeof( VkAttachmentDescriptionStencilLayoutKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct AttachmentDescriptionStencilLayoutKHR : public layout::AttachmentDescriptionStencilLayoutKHR
- {
- VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayoutKHR( vk::ImageLayout stencilInitialLayout_ = vk::ImageLayout::eUndefined,
- vk::ImageLayout stencilFinalLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : layout::AttachmentDescriptionStencilLayoutKHR( stencilInitialLayout_, stencilFinalLayout_ )
+ VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ : stencilInitialLayout( stencilInitialLayout_ )
+ , stencilFinalLayout( stencilFinalLayout_ )
{}
- AttachmentDescriptionStencilLayoutKHR( VkAttachmentDescriptionStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AttachmentDescriptionStencilLayoutKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout & operator=( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) - offsetof( AttachmentDescriptionStencilLayout, pNext ) );
+ return *this;
+ }
- AttachmentDescriptionStencilLayoutKHR& operator=( VkAttachmentDescriptionStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AttachmentDescriptionStencilLayoutKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ AttachmentDescriptionStencilLayout& operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>(&rhs);
return *this;
}
- AttachmentDescriptionStencilLayoutKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescriptionStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- AttachmentDescriptionStencilLayoutKHR & setStencilInitialLayout( vk::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescriptionStencilLayout & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilInitialLayout = stencilInitialLayout_;
return *this;
}
- AttachmentDescriptionStencilLayoutKHR & setStencilFinalLayout( vk::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentDescriptionStencilLayout & setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilFinalLayout = stencilFinalLayout_;
return *this;
}
- operator VkAttachmentDescriptionStencilLayoutKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescriptionStencilLayout const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkAttachmentDescriptionStencilLayoutKHR*>( this );
+ return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout*>( this );
}
- operator VkAttachmentDescriptionStencilLayoutKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkAttachmentDescriptionStencilLayoutKHR*>( this );
+ return *reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this );
}
- bool operator==( AttachmentDescriptionStencilLayoutKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -21245,33 +21164,36 @@ namespace VULKAN_HPP_NAMESPACE
&& ( stencilFinalLayout == rhs.stencilFinalLayout );
}
- bool operator!=( AttachmentDescriptionStencilLayoutKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::AttachmentDescriptionStencilLayoutKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- static_assert( sizeof( AttachmentDescriptionStencilLayoutKHR ) == sizeof( VkAttachmentDescriptionStencilLayoutKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayoutKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayout>::value, "struct wrapper is not a standard layout!" );
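
Editor's note (not part of the diff): the rewrite above replaces the old protected `layout::` base structs with flat standard-layout structs whose `sType` is `const`; assignment therefore `memcpy`s only the members from `pNext` onward, leaving `sType` untouched. A minimal self-contained mock of that idiom (all names here are illustrative; no Vulkan headers needed):

```cpp
#include <cassert>
#include <cstddef>   // offsetof
#include <cstring>   // memcpy

// Simplified mock of the generated wrapper pattern: sType is const, so
// operator= copies only the bytes from pNext to the end of the struct.
struct StencilLayoutMock
{
  StencilLayoutMock & operator=( StencilLayoutMock const & rhs ) noexcept
  {
    memcpy( &pNext, &rhs.pNext, sizeof( StencilLayoutMock ) - offsetof( StencilLayoutMock, pNext ) );
    return *this;
  }

  const int sType = 42;           // stands in for vk::StructureType
  void *    pNext = nullptr;
  int       stencilInitialLayout = 0;
  int       stencilFinalLayout   = 0;
};

int main()
{
  StencilLayoutMock a, b;
  b.stencilFinalLayout = 7;
  a = b;                                // memcpy-based assignment
  assert( a.stencilFinalLayout == 7 );  // payload copied
  assert( a.sType == 42 );              // const sType left intact
}
```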
struct AttachmentReference
{
- VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = 0,
- vk::ImageLayout layout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
: attachment( attachment_ )
, layout( layout_ )
{}
AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkAttachmentReference*>(this) = rhs;
+ *this = rhs;
}
AttachmentReference& operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkAttachmentReference*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>(&rhs);
return *this;
}
@@ -21281,7 +21203,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AttachmentReference & setLayout( vk::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
@@ -21309,99 +21231,74 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t attachment;
- vk::ImageLayout layout;
+ uint32_t attachment = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentReference>::value, "struct wrapper is not a standard layout!" );
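
Editor's note (not part of the diff): the rewrite keeps the fluent `set*` interface, so wrappers can still be configured by chaining; a small usage sketch (the function name is illustrative):

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: each set*() returns *this, so a wrapper can be built inline.
vk::AttachmentReference makeColorRef( uint32_t index )
{
  return vk::AttachmentReference()
      .setAttachment( index )
      .setLayout( vk::ImageLayout::eColorAttachmentOptimal );
}
```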
- namespace layout
+ struct AttachmentReference2
{
- struct AttachmentReference2KHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR AttachmentReference2KHR( uint32_t attachment_ = 0,
- vk::ImageLayout layout_ = vk::ImageLayout::eUndefined,
- vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags() ) VULKAN_HPP_NOEXCEPT
- : attachment( attachment_ )
- , layout( layout_ )
- , aspectMask( aspectMask_ )
- {}
-
- AttachmentReference2KHR( VkAttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAttachmentReference2KHR*>(this) = rhs;
- }
-
- AttachmentReference2KHR& operator=( VkAttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAttachmentReference2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eAttachmentReference2KHR;
- const void* pNext = nullptr;
- uint32_t attachment;
- vk::ImageLayout layout;
- vk::ImageAspectFlags aspectMask;
- };
- static_assert( sizeof( AttachmentReference2KHR ) == sizeof( VkAttachmentReference2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct AttachmentReference2KHR : public layout::AttachmentReference2KHR
- {
- VULKAN_HPP_CONSTEXPR AttachmentReference2KHR( uint32_t attachment_ = 0,
- vk::ImageLayout layout_ = vk::ImageLayout::eUndefined,
- vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::AttachmentReference2KHR( attachment_, layout_, aspectMask_ )
+ VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
+ : attachment( attachment_ )
+ , layout( layout_ )
+ , aspectMask( aspectMask_ )
{}
- AttachmentReference2KHR( VkAttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AttachmentReference2KHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::AttachmentReference2 & operator=( VULKAN_HPP_NAMESPACE::AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) - offsetof( AttachmentReference2, pNext ) );
+ return *this;
+ }
- AttachmentReference2KHR& operator=( VkAttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AttachmentReference2KHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ AttachmentReference2& operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>(&rhs);
return *this;
}
- AttachmentReference2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentReference2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- AttachmentReference2KHR & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
{
attachment = attachment_;
return *this;
}
- AttachmentReference2KHR & setLayout( vk::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- AttachmentReference2KHR & setAspectMask( vk::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
- operator VkAttachmentReference2KHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReference2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkAttachmentReference2KHR*>( this );
+ return *reinterpret_cast<const VkAttachmentReference2*>( this );
}
- operator VkAttachmentReference2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkAttachmentReference2KHR*>( this );
+ return *reinterpret_cast<VkAttachmentReference2*>( this );
}
- bool operator==( AttachmentReference2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -21410,117 +21307,102 @@ namespace VULKAN_HPP_NAMESPACE
&& ( aspectMask == rhs.aspectMask );
}
- bool operator!=( AttachmentReference2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::AttachmentReference2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2;
+ const void* pNext = {};
+ uint32_t attachment = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
};
- static_assert( sizeof( AttachmentReference2KHR ) == sizeof( VkAttachmentReference2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<AttachmentReference2KHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct AttachmentReferenceStencilLayoutKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayoutKHR( vk::ImageLayout stencilLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : stencilLayout( stencilLayout_ )
- {}
-
- AttachmentReferenceStencilLayoutKHR( VkAttachmentReferenceStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAttachmentReferenceStencilLayoutKHR*>(this) = rhs;
- }
-
- AttachmentReferenceStencilLayoutKHR& operator=( VkAttachmentReferenceStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkAttachmentReferenceStencilLayoutKHR*>(this) = rhs;
- return *this;
- }
+ static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
- public:
- vk::StructureType sType = StructureType::eAttachmentReferenceStencilLayoutKHR;
- void* pNext = nullptr;
- vk::ImageLayout stencilLayout;
- };
- static_assert( sizeof( AttachmentReferenceStencilLayoutKHR ) == sizeof( VkAttachmentReferenceStencilLayoutKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct AttachmentReferenceStencilLayoutKHR : public layout::AttachmentReferenceStencilLayoutKHR
+ struct AttachmentReferenceStencilLayout
{
- VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayoutKHR( vk::ImageLayout stencilLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : layout::AttachmentReferenceStencilLayoutKHR( stencilLayout_ )
+ VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ : stencilLayout( stencilLayout_ )
{}
- AttachmentReferenceStencilLayoutKHR( VkAttachmentReferenceStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::AttachmentReferenceStencilLayoutKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout & operator=( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) - offsetof( AttachmentReferenceStencilLayout, pNext ) );
+ return *this;
+ }
- AttachmentReferenceStencilLayoutKHR& operator=( VkAttachmentReferenceStencilLayoutKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::AttachmentReferenceStencilLayoutKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ AttachmentReferenceStencilLayout& operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>(&rhs);
return *this;
}
- AttachmentReferenceStencilLayoutKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentReferenceStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- AttachmentReferenceStencilLayoutKHR & setStencilLayout( vk::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
{
stencilLayout = stencilLayout_;
return *this;
}
- operator VkAttachmentReferenceStencilLayoutKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReferenceStencilLayout const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkAttachmentReferenceStencilLayoutKHR*>( this );
+ return *reinterpret_cast<const VkAttachmentReferenceStencilLayout*>( this );
}
- operator VkAttachmentReferenceStencilLayoutKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkAttachmentReferenceStencilLayoutKHR*>( this );
+ return *reinterpret_cast<VkAttachmentReferenceStencilLayout*>( this );
}
- bool operator==( AttachmentReferenceStencilLayoutKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stencilLayout == rhs.stencilLayout );
}
- bool operator!=( AttachmentReferenceStencilLayoutKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::AttachmentReferenceStencilLayoutKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
- static_assert( sizeof( AttachmentReferenceStencilLayoutKHR ) == sizeof( VkAttachmentReferenceStencilLayoutKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<AttachmentReferenceStencilLayoutKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
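
Editor's note (not part of the diff): with the KHR suffix dropped, the separate-stencil-layout struct chains off an `AttachmentReference2` via `pNext`. A hedged sketch, assuming a Vulkan 1.2 header and that both objects stay alive while the reference is consumed:

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: give the stencil aspect its own layout, separate from depth.
// Both objects must outlive whatever SubpassDescription2 consumes them.
void describeDepthStencilAttachment()
{
  vk::AttachmentReferenceStencilLayout stencil( vk::ImageLayout::eStencilAttachmentOptimal );

  vk::AttachmentReference2 depthRef = vk::AttachmentReference2()
      .setAttachment( 0 )
      .setLayout( vk::ImageLayout::eDepthAttachmentOptimal )
      .setAspectMask( vk::ImageAspectFlagBits::eDepth | vk::ImageAspectFlagBits::eStencil )
      .setPNext( &stencil );
  (void)depthRef;  // ... pass to a render pass description while 'stencil' is alive ...
}
```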
struct Extent2D
{
- VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = 0,
- uint32_t height_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {},
+ uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
: width( width_ )
, height( height_ )
{}
Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExtent2D*>(this) = rhs;
+ *this = rhs;
}
Extent2D& operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExtent2D*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>(&rhs);
return *this;
}
@@ -21558,28 +21440,28 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t width;
- uint32_t height;
+ uint32_t width = {};
+ uint32_t height = {};
};
static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Extent2D>::value, "struct wrapper is not a standard layout!" );
struct SampleLocationEXT
{
- VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = 0,
- float y_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {},
+ float y_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
{}
SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSampleLocationEXT*>(this) = rhs;
+ *this = rhs;
}
SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSampleLocationEXT*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>(&rhs);
return *this;
}
@@ -21617,65 +21499,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- float x;
- float y;
+ float x = {};
+ float y = {};
};
static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct SampleLocationsInfoEXT
{
- struct SampleLocationsInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( vk::SampleCountFlagBits sampleLocationsPerPixel_ = vk::SampleCountFlagBits::e1,
- vk::Extent2D sampleLocationGridSize_ = vk::Extent2D(),
- uint32_t sampleLocationsCount_ = 0,
- const vk::SampleLocationEXT* pSampleLocations_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
- , sampleLocationGridSize( sampleLocationGridSize_ )
- , sampleLocationsCount( sampleLocationsCount_ )
- , pSampleLocations( pSampleLocations_ )
- {}
-
- SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSampleLocationsInfoEXT*>(this) = rhs;
- }
-
- SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSampleLocationsInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSampleLocationsInfoEXT;
- const void* pNext = nullptr;
- vk::SampleCountFlagBits sampleLocationsPerPixel;
- vk::Extent2D sampleLocationGridSize;
- uint32_t sampleLocationsCount;
- const vk::SampleLocationEXT* pSampleLocations;
- };
- static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct SampleLocationsInfoEXT : public layout::SampleLocationsInfoEXT
- {
- VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( vk::SampleCountFlagBits sampleLocationsPerPixel_ = vk::SampleCountFlagBits::e1,
- vk::Extent2D sampleLocationGridSize_ = vk::Extent2D(),
- uint32_t sampleLocationsCount_ = 0,
- const vk::SampleLocationEXT* pSampleLocations_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::SampleLocationsInfoEXT( sampleLocationsPerPixel_, sampleLocationGridSize_, sampleLocationsCount_, pSampleLocations_ )
+ VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {},
+ uint32_t sampleLocationsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
+ : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
+ , sampleLocationGridSize( sampleLocationGridSize_ )
+ , sampleLocationsCount( sampleLocationsCount_ )
+ , pSampleLocations( pSampleLocations_ )
{}
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) - offsetof( SampleLocationsInfoEXT, pNext ) );
+ return *this;
+ }
+
SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SampleLocationsInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SampleLocationsInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>(&rhs);
return *this;
}
@@ -21685,13 +21540,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SampleLocationsInfoEXT & setSampleLocationsPerPixel( vk::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
+ SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsPerPixel = sampleLocationsPerPixel_;
return *this;
}
- SampleLocationsInfoEXT & setSampleLocationGridSize( vk::Extent2D sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
+ SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationGridSize = sampleLocationGridSize_;
return *this;
@@ -21703,7 +21558,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SampleLocationsInfoEXT & setPSampleLocations( const vk::SampleLocationEXT* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pSampleLocations = pSampleLocations_;
return *this;
@@ -21723,7 +21578,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( sampleLocationsPerPixel, rhs.sampleLocationsPerPixel )
+ && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
&& ( sampleLocationGridSize == rhs.sampleLocationGridSize )
&& ( sampleLocationsCount == rhs.sampleLocationsCount )
&& ( pSampleLocations == rhs.pSampleLocations );
@@ -21734,28 +21589,33 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SampleLocationsInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
+ uint32_t sampleLocationsCount = {};
+ const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations = {};
};
static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
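
Editor's note (not part of the diff): the `{}` defaults introduced above value-initialize every member, so partially filled structs stay well-defined. A sketch of programmable sample positions, assuming VK_EXT_sample_locations is enabled on the device:

```cpp
#include <vulkan/vulkan.hpp>
#include <array>

// Sketch: two custom sample positions for 2x MSAA in a 1x1 pixel grid.
std::array<vk::SampleLocationEXT, 2> const kLocations = {
  vk::SampleLocationEXT( 0.25f, 0.25f ), vk::SampleLocationEXT( 0.75f, 0.75f )
};

vk::SampleLocationsInfoEXT const kSampleInfo = vk::SampleLocationsInfoEXT()
    .setSampleLocationsPerPixel( vk::SampleCountFlagBits::e2 )
    .setSampleLocationGridSize( vk::Extent2D( 1, 1 ) )
    .setSampleLocationsCount( static_cast<uint32_t>( kLocations.size() ) )
    .setPSampleLocations( kLocations.data() );
```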
struct AttachmentSampleLocationsEXT
{
- VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = 0,
- vk::SampleLocationsInfoEXT sampleLocationsInfo_ = vk::SampleLocationsInfoEXT() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {},
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
: attachmentIndex( attachmentIndex_ )
, sampleLocationsInfo( sampleLocationsInfo_ )
{}
AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkAttachmentSampleLocationsEXT*>(this) = rhs;
+ *this = rhs;
}
AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkAttachmentSampleLocationsEXT*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>(&rhs);
return *this;
}
@@ -21765,7 +21625,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- AttachmentSampleLocationsEXT & setSampleLocationsInfo( vk::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
@@ -21793,55 +21653,29 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t attachmentIndex;
- vk::SampleLocationsInfoEXT sampleLocationsInfo;
+ uint32_t attachmentIndex = {};
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct BaseInStructure
- {
- protected:
- BaseInStructure() VULKAN_HPP_NOEXCEPT
- {}
-
- BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBaseInStructure*>(this) = rhs;
- }
-
- BaseInStructure& operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBaseInStructure*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType;
- const struct vk::BaseInStructure* pNext = nullptr;
- };
- static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "layout struct and wrapper have different size!" );
- }
-
- struct BaseInStructure : public layout::BaseInStructure
+ struct BaseInStructure
{
BaseInStructure() VULKAN_HPP_NOEXCEPT
- : layout::BaseInStructure()
{}
BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BaseInStructure( rhs )
- {}
+ {
+ *this = rhs;
+ }
BaseInStructure& operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BaseInStructure::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>(&rhs);
return *this;
}
- BaseInStructure & setPNext( const struct vk::BaseInStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
+ BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -21868,55 +21702,30 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BaseInStructure::sType;
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = {};
+ const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = {};
};
static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BaseInStructure>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct BaseOutStructure
- {
- protected:
- BaseOutStructure() VULKAN_HPP_NOEXCEPT
- {}
-
- BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBaseOutStructure*>(this) = rhs;
- }
-
- BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBaseOutStructure*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType;
- struct vk::BaseOutStructure* pNext = nullptr;
- };
- static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "layout struct and wrapper have different size!" );
- }
-
- struct BaseOutStructure : public layout::BaseOutStructure
+ struct BaseOutStructure
{
BaseOutStructure() VULKAN_HPP_NOEXCEPT
- : layout::BaseOutStructure()
{}
BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BaseOutStructure( rhs )
- {}
+ {
+ *this = rhs;
+ }
BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BaseOutStructure::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>(&rhs);
return *this;
}
- BaseOutStructure & setPNext( struct vk::BaseOutStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
+ BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
@@ -21943,69 +21752,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BaseOutStructure::sType;
+ public:
+ VULKAN_HPP_NAMESPACE::StructureType sType = {};
+ struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = {};
};
static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
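
Editor's note (not part of the diff): `BaseInStructure`/`BaseOutStructure` model the common `sType`/`pNext` header shared by every extensible struct, which is what makes generic chain traversal possible. A minimal sketch (the helper name is illustrative, not part of vulkan.hpp):

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: walk a pNext chain looking for a struct of the given type.
vk::BaseOutStructure * findInChain( vk::BaseOutStructure * head, vk::StructureType wanted )
{
  for ( vk::BaseOutStructure * node = head; node != nullptr; node = node->pNext )
  {
    if ( node->sType == wanted )
    {
      return node;
    }
  }
  return nullptr;
}
```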
- namespace layout
- {
- struct BindAccelerationStructureMemoryInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( vk::AccelerationStructureNV accelerationStructure_ = vk::AccelerationStructureNV(),
- vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize memoryOffset_ = 0,
- uint32_t deviceIndexCount_ = 0,
- const uint32_t* pDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : accelerationStructure( accelerationStructure_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
- , deviceIndexCount( deviceIndexCount_ )
- , pDeviceIndices( pDeviceIndices_ )
- {}
-
- BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>(this) = rhs;
- }
-
- BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
- const void* pNext = nullptr;
- vk::AccelerationStructureNV accelerationStructure;
- vk::DeviceMemory memory;
- vk::DeviceSize memoryOffset;
- uint32_t deviceIndexCount;
- const uint32_t* pDeviceIndices;
- };
- static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct BindAccelerationStructureMemoryInfoNV : public layout::BindAccelerationStructureMemoryInfoNV
+ struct BindAccelerationStructureMemoryInfoNV
{
- VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( vk::AccelerationStructureNV accelerationStructure_ = vk::AccelerationStructureNV(),
- vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize memoryOffset_ = 0,
- uint32_t deviceIndexCount_ = 0,
- const uint32_t* pDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::BindAccelerationStructureMemoryInfoNV( accelerationStructure_, memory_, memoryOffset_, deviceIndexCount_, pDeviceIndices_ )
+ VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+ uint32_t deviceIndexCount_ = {},
+ const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+ : accelerationStructure( accelerationStructure_ )
+ , memory( memory_ )
+ , memoryOffset( memoryOffset_ )
+ , deviceIndexCount( deviceIndexCount_ )
+ , pDeviceIndices( pDeviceIndices_ )
{}
+ VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV & operator=( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) - offsetof( BindAccelerationStructureMemoryInfoNV, pNext ) );
+ return *this;
+ }
+
BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BindAccelerationStructureMemoryInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BindAccelerationStructureMemoryInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>(&rhs);
return *this;
}
@@ -22015,19 +21796,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( vk::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
{
accelerationStructure = accelerationStructure_;
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- BindAccelerationStructureMemoryInfoNV & setMemoryOffset( vk::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
@@ -22071,57 +21852,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BindAccelerationStructureMemoryInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+ uint32_t deviceIndexCount = {};
+ const uint32_t* pDeviceIndices = {};
};
static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindAccelerationStructureMemoryInfoNV>::value, "struct wrapper is not a standard layout!" );
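
Editor's note (not part of the diff): a hedged sketch of how the NV struct above is consumed; it assumes VK_NV_ray_tracing is enabled with its entry points loaded into the dispatcher, and that all handles are valid:

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: bind backing memory to an NV acceleration structure.
void bindAccelerationStructure( vk::Device device, vk::AccelerationStructureNV as,
                                vk::DeviceMemory memory, vk::DeviceSize offset )
{
  vk::BindAccelerationStructureMemoryInfoNV info = vk::BindAccelerationStructureMemoryInfoNV()
      .setAccelerationStructure( as )
      .setMemory( memory )
      .setMemoryOffset( offset );
  device.bindAccelerationStructureMemoryNV( info );  // VK_NV_ray_tracing entry point
}
```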
- namespace layout
- {
- struct BindBufferMemoryDeviceGroupInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0,
- const uint32_t* pDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : deviceIndexCount( deviceIndexCount_ )
- , pDeviceIndices( pDeviceIndices_ )
- {}
-
- BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>(this) = rhs;
- }
-
- BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
- const void* pNext = nullptr;
- uint32_t deviceIndexCount;
- const uint32_t* pDeviceIndices;
- };
- static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct BindBufferMemoryDeviceGroupInfo : public layout::BindBufferMemoryDeviceGroupInfo
+ struct BindBufferMemoryDeviceGroupInfo
{
- VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0,
- const uint32_t* pDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::BindBufferMemoryDeviceGroupInfo( deviceIndexCount_, pDeviceIndices_ )
+ VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
+ const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+ : deviceIndexCount( deviceIndexCount_ )
+ , pDeviceIndices( pDeviceIndices_ )
{}
+ VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo & operator=( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) );
+ return *this;
+ }
+
BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BindBufferMemoryDeviceGroupInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BindBufferMemoryDeviceGroupInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>(&rhs);
return *this;
}
@@ -22166,61 +21930,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BindBufferMemoryDeviceGroupInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+ const void* pNext = {};
+ uint32_t deviceIndexCount = {};
+ const uint32_t* pDeviceIndices = {};
};
static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct BindBufferMemoryInfo
{
- struct BindBufferMemoryInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( vk::Buffer buffer_ = vk::Buffer(),
- vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize memoryOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
- {}
-
- BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindBufferMemoryInfo*>(this) = rhs;
- }
-
- BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindBufferMemoryInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBindBufferMemoryInfo;
- const void* pNext = nullptr;
- vk::Buffer buffer;
- vk::DeviceMemory memory;
- vk::DeviceSize memoryOffset;
- };
- static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct BindBufferMemoryInfo : public layout::BindBufferMemoryInfo
- {
- VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( vk::Buffer buffer_ = vk::Buffer(),
- vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize memoryOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::BindBufferMemoryInfo( buffer_, memory_, memoryOffset_ )
+ VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ )
+ , memory( memory_ )
+ , memoryOffset( memoryOffset_ )
{}
+ VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) - offsetof( BindBufferMemoryInfo, pNext ) );
+ return *this;
+ }
+
BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BindBufferMemoryInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BindBufferMemoryInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>(&rhs);
return *this;
}
@@ -22230,19 +21972,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindBufferMemoryInfo & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- BindBufferMemoryInfo & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- BindBufferMemoryInfo & setMemoryOffset( vk::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
@@ -22272,28 +22014,32 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BindBufferMemoryInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
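
Editor's note (not part of the diff): a sketch of the batched Vulkan 1.1 bind path that consumes the struct above; `device`, `buffer`, and `memory` are assumed to be valid handles created elsewhere:

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: bind one buffer through vkBindBufferMemory2.
void bindBuffer( vk::Device device, vk::Buffer buffer, vk::DeviceMemory memory )
{
  vk::BindBufferMemoryInfo info = vk::BindBufferMemoryInfo()
      .setBuffer( buffer )
      .setMemory( memory )
      .setMemoryOffset( 0 );
  device.bindBufferMemory2( info );  // ArrayProxy overload accepts a single element
}
```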
struct Offset2D
{
- VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = 0,
- int32_t y_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {},
+ int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
{}
Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkOffset2D*>(this) = rhs;
+ *this = rhs;
}
Offset2D& operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkOffset2D*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>(&rhs);
return *this;
}
@@ -22331,38 +22077,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- int32_t x;
- int32_t y;
+ int32_t x = {};
+ int32_t y = {};
};
static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Offset2D>::value, "struct wrapper is not a standard layout!" );
struct Rect2D
{
- VULKAN_HPP_CONSTEXPR Rect2D( vk::Offset2D offset_ = vk::Offset2D(),
- vk::Extent2D extent_ = vk::Extent2D() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
: offset( offset_ )
, extent( extent_ )
{}
Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkRect2D*>(this) = rhs;
+ *this = rhs;
}
Rect2D& operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkRect2D*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>(&rhs);
return *this;
}
- Rect2D & setOffset( vk::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
+ Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- Rect2D & setExtent( vk::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
+ Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -22390,65 +22136,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Offset2D offset;
- vk::Extent2D extent;
+ VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+ VULKAN_HPP_NAMESPACE::Extent2D extent = {};
};
static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Rect2D>::value, "struct wrapper is not a standard layout!" );
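
Editor's note (not part of the diff): `Offset2D` and `Extent2D` compose into `Rect2D`, e.g. for scissors or render areas; a trivial sketch:

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: a full-framebuffer scissor rectangle.
vk::Rect2D fullRect( uint32_t width, uint32_t height )
{
  return vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( width, height ) );
}
```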
- namespace layout
+ struct BindImageMemoryDeviceGroupInfo
{
- struct BindImageMemoryDeviceGroupInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0,
- const uint32_t* pDeviceIndices_ = nullptr,
- uint32_t splitInstanceBindRegionCount_ = 0,
- const vk::Rect2D* pSplitInstanceBindRegions_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : deviceIndexCount( deviceIndexCount_ )
- , pDeviceIndices( pDeviceIndices_ )
- , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
- , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
- {}
-
- BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>(this) = rhs;
- }
-
- BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
- const void* pNext = nullptr;
- uint32_t deviceIndexCount;
- const uint32_t* pDeviceIndices;
- uint32_t splitInstanceBindRegionCount;
- const vk::Rect2D* pSplitInstanceBindRegions;
- };
- static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct BindImageMemoryDeviceGroupInfo : public layout::BindImageMemoryDeviceGroupInfo
- {
- VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = 0,
- const uint32_t* pDeviceIndices_ = nullptr,
- uint32_t splitInstanceBindRegionCount_ = 0,
- const vk::Rect2D* pSplitInstanceBindRegions_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::BindImageMemoryDeviceGroupInfo( deviceIndexCount_, pDeviceIndices_, splitInstanceBindRegionCount_, pSplitInstanceBindRegions_ )
+ VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
+ const uint32_t* pDeviceIndices_ = {},
+ uint32_t splitInstanceBindRegionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT
+ : deviceIndexCount( deviceIndexCount_ )
+ , pDeviceIndices( pDeviceIndices_ )
+ , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
+ , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
{}
+ VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo & operator=( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) );
+ return *this;
+ }
+
BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BindImageMemoryDeviceGroupInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BindImageMemoryDeviceGroupInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>(&rhs);
return *this;
}
@@ -22476,7 +22195,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const vk::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
+ BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
{
pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
return *this;
@@ -22507,61 +22226,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BindImageMemoryDeviceGroupInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
+ const void* pNext = {};
+ uint32_t deviceIndexCount = {};
+ const uint32_t* pDeviceIndices = {};
+ uint32_t splitInstanceBindRegionCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {};
};
static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct BindImageMemoryInfo
{
- struct BindImageMemoryInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( vk::Image image_ = vk::Image(),
- vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize memoryOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- , memory( memory_ )
- , memoryOffset( memoryOffset_ )
- {}
-
- BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindImageMemoryInfo*>(this) = rhs;
- }
-
- BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindImageMemoryInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBindImageMemoryInfo;
- const void* pNext = nullptr;
- vk::Image image;
- vk::DeviceMemory memory;
- vk::DeviceSize memoryOffset;
- };
- static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct BindImageMemoryInfo : public layout::BindImageMemoryInfo
- {
- VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( vk::Image image_ = vk::Image(),
- vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize memoryOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::BindImageMemoryInfo( image_, memory_, memoryOffset_ )
+ VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : image( image_ )
+ , memory( memory_ )
+ , memoryOffset( memoryOffset_ )
{}
+ VULKAN_HPP_NAMESPACE::BindImageMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) - offsetof( BindImageMemoryInfo, pNext ) );
+ return *this;
+ }
+
BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BindImageMemoryInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BindImageMemoryInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>(&rhs);
return *this;
}
@@ -22571,19 +22270,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindImageMemoryInfo & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
- BindImageMemoryInfo & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- BindImageMemoryInfo & setMemoryOffset( vk::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
@@ -22613,57 +22312,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BindImageMemoryInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct BindImageMemorySwapchainInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR(),
- uint32_t imageIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
- , imageIndex( imageIndex_ )
- {}
-
- BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>(this) = rhs;
- }
-
- BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
- const void* pNext = nullptr;
- vk::SwapchainKHR swapchain;
- uint32_t imageIndex;
- };
- static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct BindImageMemorySwapchainInfoKHR : public layout::BindImageMemorySwapchainInfoKHR
+ struct BindImageMemorySwapchainInfoKHR
{
- VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR(),
- uint32_t imageIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::BindImageMemorySwapchainInfoKHR( swapchain_, imageIndex_ )
+ VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
+ uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : swapchain( swapchain_ )
+ , imageIndex( imageIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR & operator=( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) );
+ return *this;
+ }
+
BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BindImageMemorySwapchainInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BindImageMemorySwapchainInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>(&rhs);
return *this;
}
@@ -22673,7 +22353,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindImageMemorySwapchainInfoKHR & setSwapchain( vk::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+ BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
@@ -22708,53 +22388,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BindImageMemorySwapchainInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+ uint32_t imageIndex = {};
};
static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct BindImagePlaneMemoryInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( vk::ImageAspectFlagBits planeAspect_ = vk::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : planeAspect( planeAspect_ )
- {}
-
- BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindImagePlaneMemoryInfo*>(this) = rhs;
- }
-
- BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindImagePlaneMemoryInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
- const void* pNext = nullptr;
- vk::ImageAspectFlagBits planeAspect;
- };
- static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct BindImagePlaneMemoryInfo : public layout::BindImagePlaneMemoryInfo
+ struct BindImagePlaneMemoryInfo
{
- VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( vk::ImageAspectFlagBits planeAspect_ = vk::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : layout::BindImagePlaneMemoryInfo( planeAspect_ )
+ VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
+ : planeAspect( planeAspect_ )
{}
+ VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo & operator=( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) - offsetof( BindImagePlaneMemoryInfo, pNext ) );
+ return *this;
+ }
+
BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BindImagePlaneMemoryInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BindImagePlaneMemoryInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>(&rhs);
return *this;
}
@@ -22764,7 +22426,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindImagePlaneMemoryInfo & setPlaneAspect( vk::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+ BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
@@ -22784,7 +22446,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( planeAspect, rhs.planeAspect );
+ && ( planeAspect == rhs.planeAspect );
}
bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -22792,19 +22454,21 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BindImagePlaneMemoryInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
struct SparseMemoryBind
{
- VULKAN_HPP_CONSTEXPR SparseMemoryBind( vk::DeviceSize resourceOffset_ = 0,
- vk::DeviceSize size_ = 0,
- vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize memoryOffset_ = 0,
- vk::SparseMemoryBindFlags flags_ = vk::SparseMemoryBindFlags() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+ VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
: resourceOffset( resourceOffset_ )
, size( size_ )
, memory( memory_ )
@@ -22814,40 +22478,40 @@ namespace VULKAN_HPP_NAMESPACE
SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseMemoryBind*>(this) = rhs;
+ *this = rhs;
}
SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseMemoryBind*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>(&rhs);
return *this;
}
- SparseMemoryBind & setResourceOffset( vk::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
+ SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
{
resourceOffset = resourceOffset_;
return *this;
}
- SparseMemoryBind & setSize( vk::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
- SparseMemoryBind & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- SparseMemoryBind & setMemoryOffset( vk::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
- SparseMemoryBind & setFlags( vk::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -22878,20 +22542,20 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DeviceSize resourceOffset;
- vk::DeviceSize size;
- vk::DeviceMemory memory;
- vk::DeviceSize memoryOffset;
- vk::SparseMemoryBindFlags flags;
+ VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+ VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
};
static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
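
With every member now value-initialized via = {}, a default-constructed wrapper is a valid zeroed struct and the fluent setters can fill in only what is needed. A sketch of the chaining style (VULKAN_HPP_NAMESPACE defaults to vk; memory is an assumed vk::DeviceMemory handle):

    vk::SparseMemoryBind bind = vk::SparseMemoryBind()
        .setResourceOffset( 0 )
        .setSize( 65536 )
        .setMemory( memory )
        .setMemoryOffset( 0 );
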
struct SparseBufferMemoryBindInfo
{
- VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( vk::Buffer buffer_ = vk::Buffer(),
- uint32_t bindCount_ = 0,
- const vk::SparseMemoryBind* pBinds_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ uint32_t bindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
: buffer( buffer_ )
, bindCount( bindCount_ )
, pBinds( pBinds_ )
@@ -22899,16 +22563,16 @@ namespace VULKAN_HPP_NAMESPACE
SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseBufferMemoryBindInfo*>(this) = rhs;
+ *this = rhs;
}
SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseBufferMemoryBindInfo*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>(&rhs);
return *this;
}
- SparseBufferMemoryBindInfo & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
@@ -22920,7 +22584,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SparseBufferMemoryBindInfo & setPBinds( const vk::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
+ SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
@@ -22949,18 +22613,18 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Buffer buffer;
- uint32_t bindCount;
- const vk::SparseMemoryBind* pBinds;
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ uint32_t bindCount = {};
+ const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
};
static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
struct SparseImageOpaqueMemoryBindInfo
{
- VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( vk::Image image_ = vk::Image(),
- uint32_t bindCount_ = 0,
- const vk::SparseMemoryBind* pBinds_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ uint32_t bindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
: image( image_ )
, bindCount( bindCount_ )
, pBinds( pBinds_ )
@@ -22968,16 +22632,16 @@ namespace VULKAN_HPP_NAMESPACE
SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>(this) = rhs;
+ *this = rhs;
}
SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>(&rhs);
return *this;
}
- SparseImageOpaqueMemoryBindInfo & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
@@ -22989,7 +22653,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SparseImageOpaqueMemoryBindInfo & setPBinds( const vk::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
@@ -23018,18 +22682,18 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Image image;
- uint32_t bindCount;
- const vk::SparseMemoryBind* pBinds;
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ uint32_t bindCount = {};
+ const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
};
static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
struct ImageSubresource
{
- VULKAN_HPP_CONSTEXPR ImageSubresource( vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags(),
- uint32_t mipLevel_ = 0,
- uint32_t arrayLayer_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+ uint32_t mipLevel_ = {},
+ uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, mipLevel( mipLevel_ )
, arrayLayer( arrayLayer_ )
@@ -23037,16 +22701,16 @@ namespace VULKAN_HPP_NAMESPACE
ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageSubresource*>(this) = rhs;
+ *this = rhs;
}
ImageSubresource& operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageSubresource*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>(&rhs);
return *this;
}
- ImageSubresource & setAspectMask( vk::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -23087,25 +22751,25 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageAspectFlags aspectMask;
- uint32_t mipLevel;
- uint32_t arrayLayer;
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ uint32_t mipLevel = {};
+ uint32_t arrayLayer = {};
};
static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageSubresource>::value, "struct wrapper is not a standard layout!" );
struct Offset3D
{
- VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = 0,
- int32_t y_ = 0,
- int32_t z_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {},
+ int32_t y_ = {},
+ int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
, z( z_ )
{}
explicit Offset3D( Offset2D const& offset2D,
- int32_t z_ = 0 )
+ int32_t z_ = {} )
: x( offset2D.x )
, y( offset2D.y )
, z( z_ )
@@ -23113,12 +22777,12 @@ namespace VULKAN_HPP_NAMESPACE
Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkOffset3D*>(this) = rhs;
+ *this = rhs;
}
Offset3D& operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkOffset3D*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>(&rhs);
return *this;
}
@@ -23163,25 +22827,25 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- int32_t x;
- int32_t y;
- int32_t z;
+ int32_t x = {};
+ int32_t y = {};
+ int32_t z = {};
};
static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );
struct Extent3D
{
- VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = 0,
- uint32_t height_ = 0,
- uint32_t depth_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {},
+ uint32_t height_ = {},
+ uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
: width( width_ )
, height( height_ )
, depth( depth_ )
{}
explicit Extent3D( Extent2D const& extent2D,
- uint32_t depth_ = 0 )
+ uint32_t depth_ = {} )
: width( extent2D.width )
, height( extent2D.height )
, depth( depth_ )
@@ -23189,12 +22853,12 @@ namespace VULKAN_HPP_NAMESPACE
Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExtent3D*>(this) = rhs;
+ *this = rhs;
}
Extent3D& operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExtent3D*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>(&rhs);
return *this;
}
@@ -23239,21 +22903,21 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t width;
- uint32_t height;
- uint32_t depth;
+ uint32_t width = {};
+ uint32_t height = {};
+ uint32_t depth = {};
};
static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!" );
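
Offset3D and Extent3D keep their explicit promoting constructors from the 2D variants (note these are not VULKAN_HPP_CONSTEXPR, unlike the defaulted ones). A hedged sketch of extending 2D coordinates for a 3D copy region:

    vk::Offset2D corner{ 16, 16 };
    vk::Extent2D size{ 256, 256 };
    vk::Offset3D corner3( corner, 0 );   // z supplied explicitly
    vk::Extent3D size3( size, 1 );       // depth = 1 for a 2D image region
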
struct SparseImageMemoryBind
{
- VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( vk::ImageSubresource subresource_ = vk::ImageSubresource(),
- vk::Offset3D offset_ = vk::Offset3D(),
- vk::Extent3D extent_ = vk::Extent3D(),
- vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize memoryOffset_ = 0,
- vk::SparseMemoryBindFlags flags_ = vk::SparseMemoryBindFlags() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D offset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {},
+ VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
: subresource( subresource_ )
, offset( offset_ )
, extent( extent_ )
@@ -23264,46 +22928,46 @@ namespace VULKAN_HPP_NAMESPACE
SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageMemoryBind*>(this) = rhs;
+ *this = rhs;
}
SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageMemoryBind*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>(&rhs);
return *this;
}
- SparseImageMemoryBind & setSubresource( vk::ImageSubresource subresource_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ ) VULKAN_HPP_NOEXCEPT
{
subresource = subresource_;
return *this;
}
- SparseImageMemoryBind & setOffset( vk::Offset3D offset_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- SparseImageMemoryBind & setExtent( vk::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
- SparseImageMemoryBind & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- SparseImageMemoryBind & setMemoryOffset( vk::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
- SparseImageMemoryBind & setFlags( vk::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -23335,21 +22999,21 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageSubresource subresource;
- vk::Offset3D offset;
- vk::Extent3D extent;
- vk::DeviceMemory memory;
- vk::DeviceSize memoryOffset;
- vk::SparseMemoryBindFlags flags;
+ VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D offset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+ VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
};
static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
struct SparseImageMemoryBindInfo
{
- VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( vk::Image image_ = vk::Image(),
- uint32_t bindCount_ = 0,
- const vk::SparseImageMemoryBind* pBinds_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ uint32_t bindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
: image( image_ )
, bindCount( bindCount_ )
, pBinds( pBinds_ )
@@ -23357,16 +23021,16 @@ namespace VULKAN_HPP_NAMESPACE
SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageMemoryBindInfo*>(this) = rhs;
+ *this = rhs;
}
SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageMemoryBindInfo*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>(&rhs);
return *this;
}
- SparseImageMemoryBindInfo & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
@@ -23378,7 +23042,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SparseImageMemoryBindInfo & setPBinds( const vk::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
@@ -23407,90 +23071,51 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Image image;
- uint32_t bindCount;
- const vk::SparseImageMemoryBind* pBinds;
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ uint32_t bindCount = {};
+ const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {};
};
static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct BindSparseInfo
{
- struct BindSparseInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = 0,
- const vk::Semaphore* pWaitSemaphores_ = nullptr,
- uint32_t bufferBindCount_ = 0,
- const vk::SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr,
- uint32_t imageOpaqueBindCount_ = 0,
- const vk::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr,
- uint32_t imageBindCount_ = 0,
- const vk::SparseImageMemoryBindInfo* pImageBinds_ = nullptr,
- uint32_t signalSemaphoreCount_ = 0,
- const vk::Semaphore* pSignalSemaphores_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphores( pWaitSemaphores_ )
- , bufferBindCount( bufferBindCount_ )
- , pBufferBinds( pBufferBinds_ )
- , imageOpaqueBindCount( imageOpaqueBindCount_ )
- , pImageOpaqueBinds( pImageOpaqueBinds_ )
- , imageBindCount( imageBindCount_ )
- , pImageBinds( pImageBinds_ )
- , signalSemaphoreCount( signalSemaphoreCount_ )
- , pSignalSemaphores( pSignalSemaphores_ )
- {}
-
- BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindSparseInfo*>(this) = rhs;
- }
-
- BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBindSparseInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBindSparseInfo;
- const void* pNext = nullptr;
- uint32_t waitSemaphoreCount;
- const vk::Semaphore* pWaitSemaphores;
- uint32_t bufferBindCount;
- const vk::SparseBufferMemoryBindInfo* pBufferBinds;
- uint32_t imageOpaqueBindCount;
- const vk::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
- uint32_t imageBindCount;
- const vk::SparseImageMemoryBindInfo* pImageBinds;
- uint32_t signalSemaphoreCount;
- const vk::Semaphore* pSignalSemaphores;
- };
- static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct BindSparseInfo : public layout::BindSparseInfo
- {
- VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = 0,
- const vk::Semaphore* pWaitSemaphores_ = nullptr,
- uint32_t bufferBindCount_ = 0,
- const vk::SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr,
- uint32_t imageOpaqueBindCount_ = 0,
- const vk::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr,
- uint32_t imageBindCount_ = 0,
- const vk::SparseImageMemoryBindInfo* pImageBinds_ = nullptr,
- uint32_t signalSemaphoreCount_ = 0,
- const vk::Semaphore* pSignalSemaphores_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::BindSparseInfo( waitSemaphoreCount_, pWaitSemaphores_, bufferBindCount_, pBufferBinds_, imageOpaqueBindCount_, pImageOpaqueBinds_, imageBindCount_, pImageBinds_, signalSemaphoreCount_, pSignalSemaphores_ )
+ VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
+ uint32_t bufferBindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {},
+ uint32_t imageOpaqueBindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {},
+ uint32_t imageBindCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {},
+ uint32_t signalSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphores( pWaitSemaphores_ )
+ , bufferBindCount( bufferBindCount_ )
+ , pBufferBinds( pBufferBinds_ )
+ , imageOpaqueBindCount( imageOpaqueBindCount_ )
+ , pImageOpaqueBinds( pImageOpaqueBinds_ )
+ , imageBindCount( imageBindCount_ )
+ , pImageBinds( pImageBinds_ )
+ , signalSemaphoreCount( signalSemaphoreCount_ )
+ , pSignalSemaphores( pSignalSemaphores_ )
{}
+ VULKAN_HPP_NAMESPACE::BindSparseInfo & operator=( VULKAN_HPP_NAMESPACE::BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) - offsetof( BindSparseInfo, pNext ) );
+ return *this;
+ }
+
BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BindSparseInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BindSparseInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>(&rhs);
return *this;
}
@@ -23506,7 +23131,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindSparseInfo & setPWaitSemaphores( const vk::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
@@ -23518,7 +23143,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindSparseInfo & setPBufferBinds( const vk::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
+ BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBufferBinds = pBufferBinds_;
return *this;
@@ -23530,7 +23155,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindSparseInfo & setPImageOpaqueBinds( const vk::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
+ BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
{
pImageOpaqueBinds = pImageOpaqueBinds_;
return *this;
@@ -23542,7 +23167,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindSparseInfo & setPImageBinds( const vk::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT
+ BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT
{
pImageBinds = pImageBinds_;
return *this;
@@ -23554,7 +23179,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BindSparseInfo & setPSignalSemaphores( const vk::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphores = pSignalSemaphores_;
return *this;
@@ -23591,17 +23216,28 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BindSparseInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
+ const void* pNext = {};
+ uint32_t waitSemaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+ uint32_t bufferBindCount = {};
+ const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds = {};
+ uint32_t imageOpaqueBindCount = {};
+ const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds = {};
+ uint32_t imageBindCount = {};
+ const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {};
+ uint32_t signalSemaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
};
static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
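
BindSparseInfo aggregates the bind arrays above into one queue submission. A sketch of wiring them together, assuming queue, buffer, bind (a vk::SparseMemoryBind), and fence are existing valid handles:

    vk::SparseBufferMemoryBindInfo bufferBind( buffer, 1, &bind );
    vk::BindSparseInfo info;
    info.setBufferBindCount( 1 )
        .setPBufferBinds( &bufferBind );
    queue.bindSparse( info, fence );   // single-element ArrayProxy overload
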
struct BufferCopy
{
- VULKAN_HPP_CONSTEXPR BufferCopy( vk::DeviceSize srcOffset_ = 0,
- vk::DeviceSize dstOffset_ = 0,
- vk::DeviceSize size_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
: srcOffset( srcOffset_ )
, dstOffset( dstOffset_ )
, size( size_ )
@@ -23609,28 +23245,28 @@ namespace VULKAN_HPP_NAMESPACE
BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkBufferCopy*>(this) = rhs;
+ *this = rhs;
}
BufferCopy& operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkBufferCopy*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>(&rhs);
return *this;
}
- BufferCopy & setSrcOffset( vk::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- BufferCopy & setDstOffset( vk::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
- BufferCopy & setSize( vk::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
@@ -23659,74 +23295,43 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DeviceSize srcOffset;
- vk::DeviceSize dstOffset;
- vk::DeviceSize size;
+ VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct BufferCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR BufferCreateInfo( vk::BufferCreateFlags flags_ = vk::BufferCreateFlags(),
- vk::DeviceSize size_ = 0,
- vk::BufferUsageFlags usage_ = vk::BufferUsageFlags(),
- vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = 0,
- const uint32_t* pQueueFamilyIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , size( size_ )
- , usage( usage_ )
- , sharingMode( sharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
- {}
-
- BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferCreateInfo*>(this) = rhs;
- }
-
- BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBufferCreateInfo;
- const void* pNext = nullptr;
- vk::BufferCreateFlags flags;
- vk::DeviceSize size;
- vk::BufferUsageFlags usage;
- vk::SharingMode sharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- };
- static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct BufferCreateInfo : public layout::BufferCreateInfo
+ struct BufferCreateInfo
{
- VULKAN_HPP_CONSTEXPR BufferCreateInfo( vk::BufferCreateFlags flags_ = vk::BufferCreateFlags(),
- vk::DeviceSize size_ = 0,
- vk::BufferUsageFlags usage_ = vk::BufferUsageFlags(),
- vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = 0,
- const uint32_t* pQueueFamilyIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::BufferCreateInfo( flags_, size_, usage_, sharingMode_, queueFamilyIndexCount_, pQueueFamilyIndices_ )
+ VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+ uint32_t queueFamilyIndexCount_ = {},
+ const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , size( size_ )
+ , usage( usage_ )
+ , sharingMode( sharingMode_ )
+ , queueFamilyIndexCount( queueFamilyIndexCount_ )
+ , pQueueFamilyIndices( pQueueFamilyIndices_ )
{}
+ VULKAN_HPP_NAMESPACE::BufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) - offsetof( BufferCreateInfo, pNext ) );
+ return *this;
+ }
+
BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BufferCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BufferCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>(&rhs);
return *this;
}
@@ -23736,25 +23341,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BufferCreateInfo & setFlags( vk::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- BufferCreateInfo & setSize( vk::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
- BufferCreateInfo & setUsage( vk::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- BufferCreateInfo & setSharingMode( vk::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+ BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
@@ -23799,53 +23404,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BufferCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+ uint32_t queueFamilyIndexCount = {};
+ const uint32_t* pQueueFamilyIndices = {};
};
static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
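
A typical use of the rewritten BufferCreateInfo, assuming device is a valid vk::Device; all other members stay at their new {} defaults:

    vk::BufferCreateInfo createInfo = vk::BufferCreateInfo()
        .setSize( 1024 )
        .setUsage( vk::BufferUsageFlagBits::eUniformBuffer )
        .setSharingMode( vk::SharingMode::eExclusive );
    vk::Buffer buffer = device.createBuffer( createInfo );
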
- namespace layout
- {
- struct BufferDeviceAddressCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( vk::DeviceAddress deviceAddress_ = 0 ) VULKAN_HPP_NOEXCEPT
- : deviceAddress( deviceAddress_ )
- {}
-
- BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>(this) = rhs;
- }
-
- BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
- const void* pNext = nullptr;
- vk::DeviceAddress deviceAddress;
- };
- static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct BufferDeviceAddressCreateInfoEXT : public layout::BufferDeviceAddressCreateInfoEXT
+ struct BufferDeviceAddressCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( vk::DeviceAddress deviceAddress_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::BufferDeviceAddressCreateInfoEXT( deviceAddress_ )
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
+ : deviceAddress( deviceAddress_ )
{}
+ VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BufferDeviceAddressCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BufferDeviceAddressCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -23855,7 +23446,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BufferDeviceAddressCreateInfoEXT & setDeviceAddress( vk::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
deviceAddress = deviceAddress_;
return *this;
@@ -23883,102 +23474,85 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BufferDeviceAddressCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
};
static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct BufferDeviceAddressInfo
{
- struct BufferDeviceAddressInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfoEXT( vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- {}
-
- BufferDeviceAddressInfoEXT( VkBufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferDeviceAddressInfoEXT*>(this) = rhs;
- }
-
- BufferDeviceAddressInfoEXT& operator=( VkBufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferDeviceAddressInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBufferDeviceAddressInfoEXT;
- const void* pNext = nullptr;
- vk::Buffer buffer;
- };
- static_assert( sizeof( BufferDeviceAddressInfoEXT ) == sizeof( VkBufferDeviceAddressInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct BufferDeviceAddressInfoEXT : public layout::BufferDeviceAddressInfoEXT
- {
- VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfoEXT( vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
- : layout::BufferDeviceAddressInfoEXT( buffer_ )
+ VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ )
{}
- BufferDeviceAddressInfoEXT( VkBufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BufferDeviceAddressInfoEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & operator=( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) - offsetof( BufferDeviceAddressInfo, pNext ) );
+ return *this;
+ }
+
+ BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- BufferDeviceAddressInfoEXT& operator=( VkBufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ BufferDeviceAddressInfo& operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BufferDeviceAddressInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>(&rhs);
return *this;
}
- BufferDeviceAddressInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ BufferDeviceAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- BufferDeviceAddressInfoEXT & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- operator VkBufferDeviceAddressInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkBufferDeviceAddressInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkBufferDeviceAddressInfoEXT*>( this );
+ return *reinterpret_cast<const VkBufferDeviceAddressInfo*>( this );
}
- operator VkBufferDeviceAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkBufferDeviceAddressInfoEXT*>( this );
+ return *reinterpret_cast<VkBufferDeviceAddressInfo*>( this );
}
- bool operator==( BufferDeviceAddressInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
}
- bool operator!=( BufferDeviceAddressInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::BufferDeviceAddressInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
- static_assert( sizeof( BufferDeviceAddressInfoEXT ) == sizeof( VkBufferDeviceAddressInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<BufferDeviceAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
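
Beyond the layout:: removal, this hunk also renames BufferDeviceAddressInfoEXT to BufferDeviceAddressInfo, matching the promotion of buffer device addresses into core Vulkan 1.2. A hedged sketch, assuming a 1.2 device with the bufferDeviceAddress feature enabled and that the core getBufferAddress entry point is available in this header revision:

    vk::BufferDeviceAddressInfo addressInfo( buffer );
    vk::DeviceAddress address = device.getBufferAddress( addressInfo );
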
struct ImageSubresourceLayers
{
- VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags(),
- uint32_t mipLevel_ = 0,
- uint32_t baseArrayLayer_ = 0,
- uint32_t layerCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+ uint32_t mipLevel_ = {},
+ uint32_t baseArrayLayer_ = {},
+ uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, mipLevel( mipLevel_ )
, baseArrayLayer( baseArrayLayer_ )
@@ -23987,16 +23561,16 @@ namespace VULKAN_HPP_NAMESPACE
ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageSubresourceLayers*>(this) = rhs;
+ *this = rhs;
}
ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageSubresourceLayers*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>(&rhs);
return *this;
}
- ImageSubresourceLayers & setAspectMask( vk::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -24044,22 +23618,22 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageAspectFlags aspectMask;
- uint32_t mipLevel;
- uint32_t baseArrayLayer;
- uint32_t layerCount;
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ uint32_t mipLevel = {};
+ uint32_t baseArrayLayer = {};
+ uint32_t layerCount = {};
};
static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
struct BufferImageCopy
{
- VULKAN_HPP_CONSTEXPR BufferImageCopy( vk::DeviceSize bufferOffset_ = 0,
- uint32_t bufferRowLength_ = 0,
- uint32_t bufferImageHeight_ = 0,
- vk::ImageSubresourceLayers imageSubresource_ = vk::ImageSubresourceLayers(),
- vk::Offset3D imageOffset_ = vk::Offset3D(),
- vk::Extent3D imageExtent_ = vk::Extent3D() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {},
+ uint32_t bufferRowLength_ = {},
+ uint32_t bufferImageHeight_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
: bufferOffset( bufferOffset_ )
, bufferRowLength( bufferRowLength_ )
, bufferImageHeight( bufferImageHeight_ )
@@ -24070,16 +23644,16 @@ namespace VULKAN_HPP_NAMESPACE
BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkBufferImageCopy*>(this) = rhs;
+ *this = rhs;
}
BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkBufferImageCopy*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>(&rhs);
return *this;
}
- BufferImageCopy & setBufferOffset( vk::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
{
bufferOffset = bufferOffset_;
return *this;
@@ -24097,19 +23671,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BufferImageCopy & setImageSubresource( vk::ImageSubresourceLayers imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ ) VULKAN_HPP_NOEXCEPT
{
imageSubresource = imageSubresource_;
return *this;
}
- BufferImageCopy & setImageOffset( vk::Offset3D imageOffset_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ ) VULKAN_HPP_NOEXCEPT
{
imageOffset = imageOffset_;
return *this;
}
- BufferImageCopy & setImageExtent( vk::Extent3D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
@@ -24141,81 +23715,48 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DeviceSize bufferOffset;
- uint32_t bufferRowLength;
- uint32_t bufferImageHeight;
- vk::ImageSubresourceLayers imageSubresource;
- vk::Offset3D imageOffset;
- vk::Extent3D imageExtent;
+ VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
+ uint32_t bufferRowLength = {};
+ uint32_t bufferImageHeight = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
};
static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
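
BufferImageCopy regions are what the promoting Offset3D/Extent3D constructors above feed into. A sketch, assuming cmd, stagingBuffer, and image are valid handles and the image is already in eTransferDstOptimal layout:

    vk::BufferImageCopy region;
    region.setImageSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
          .setImageExtent( vk::Extent3D( 256, 256, 1 ) );
    cmd.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );
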
- namespace layout
+ struct BufferMemoryBarrier
{
- struct BufferMemoryBarrier
- {
- protected:
- VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags(),
- uint32_t srcQueueFamilyIndex_ = 0,
- uint32_t dstQueueFamilyIndex_ = 0,
- vk::Buffer buffer_ = vk::Buffer(),
- vk::DeviceSize offset_ = 0,
- vk::DeviceSize size_ = 0 ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
- , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
- , buffer( buffer_ )
- , offset( offset_ )
- , size( size_ )
- {}
-
- BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferMemoryBarrier*>(this) = rhs;
- }
-
- BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferMemoryBarrier*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBufferMemoryBarrier;
- const void* pNext = nullptr;
- vk::AccessFlags srcAccessMask;
- vk::AccessFlags dstAccessMask;
- uint32_t srcQueueFamilyIndex;
- uint32_t dstQueueFamilyIndex;
- vk::Buffer buffer;
- vk::DeviceSize offset;
- vk::DeviceSize size;
- };
- static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "layout struct and wrapper have different size!" );
- }
-
- struct BufferMemoryBarrier : public layout::BufferMemoryBarrier
- {
- VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags(),
- uint32_t srcQueueFamilyIndex_ = 0,
- uint32_t dstQueueFamilyIndex_ = 0,
- vk::Buffer buffer_ = vk::Buffer(),
- vk::DeviceSize offset_ = 0,
- vk::DeviceSize size_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::BufferMemoryBarrier( srcAccessMask_, dstAccessMask_, srcQueueFamilyIndex_, dstQueueFamilyIndex_, buffer_, offset_, size_ )
+ VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+ uint32_t srcQueueFamilyIndex_ = {},
+ uint32_t dstQueueFamilyIndex_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+ : srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
+ , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+ , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+ , buffer( buffer_ )
+ , offset( offset_ )
+ , size( size_ )
{}
+ VULKAN_HPP_NAMESPACE::BufferMemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) - offsetof( BufferMemoryBarrier, pNext ) );
+ return *this;
+ }
+
BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BufferMemoryBarrier( rhs )
- {}
+ {
+ *this = rhs;
+ }
BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BufferMemoryBarrier::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>(&rhs);
return *this;
}
@@ -24225,13 +23766,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BufferMemoryBarrier & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- BufferMemoryBarrier & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
@@ -24249,19 +23790,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BufferMemoryBarrier & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- BufferMemoryBarrier & setOffset( vk::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- BufferMemoryBarrier & setSize( vk::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
@@ -24295,53 +23836,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BufferMemoryBarrier::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ uint32_t srcQueueFamilyIndex = {};
+ uint32_t dstQueueFamilyIndex = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
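
A sketch of recording the rewritten BufferMemoryBarrier, assuming cmd and buffer exist; it guards a transfer write before a fragment-shader read over the whole buffer:

    vk::BufferMemoryBarrier barrier(
        vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead,
        VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
        buffer, 0, VK_WHOLE_SIZE );
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
                         vk::PipelineStageFlagBits::eFragmentShader,
                         {}, nullptr, barrier, nullptr );
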
- namespace layout
+ struct BufferMemoryRequirementsInfo2
{
- struct BufferMemoryRequirementsInfo2
- {
- protected:
- VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- {}
-
- BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>(this) = rhs;
- }
-
- BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
- const void* pNext = nullptr;
- vk::Buffer buffer;
- };
- static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "layout struct and wrapper have different size!" );
- }
-
- struct BufferMemoryRequirementsInfo2 : public layout::BufferMemoryRequirementsInfo2
- {
- VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
- : layout::BufferMemoryRequirementsInfo2( buffer_ )
+ VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ )
{}
+ VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) );
+ return *this;
+ }
+
BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BufferMemoryRequirementsInfo2( rhs )
- {}
+ {
+ *this = rhs;
+ }
BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BufferMemoryRequirementsInfo2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>(&rhs);
return *this;
}
@@ -24351,7 +23879,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BufferMemoryRequirementsInfo2 & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
@@ -24379,69 +23907,107 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BufferMemoryRequirementsInfo2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct BufferOpaqueCaptureAddressCreateInfo
{
- struct BufferViewCreateInfo
+ VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
+ : opaqueCaptureAddress( opaqueCaptureAddress_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( vk::BufferViewCreateFlags flags_ = vk::BufferViewCreateFlags(),
- vk::Buffer buffer_ = vk::Buffer(),
- vk::Format format_ = vk::Format::eUndefined,
- vk::DeviceSize offset_ = 0,
- vk::DeviceSize range_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , buffer( buffer_ )
- , format( format_ )
- , offset( offset_ )
- , range( range_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) - offsetof( BufferOpaqueCaptureAddressCreateInfo, pNext ) );
+ return *this;
+ }
- BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferViewCreateInfo*>(this) = rhs;
- }
+ BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkBufferViewCreateInfo*>(this) = rhs;
- return *this;
- }
+ BufferOpaqueCaptureAddressCreateInfo& operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>(&rhs);
+ return *this;
+ }
- public:
- vk::StructureType sType = StructureType::eBufferViewCreateInfo;
- const void* pNext = nullptr;
- vk::BufferViewCreateFlags flags;
- vk::Buffer buffer;
- vk::Format format;
- vk::DeviceSize offset;
- vk::DeviceSize range;
- };
- static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "layout struct and wrapper have different size!" );
- }
+ BufferOpaqueCaptureAddressCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ opaqueCaptureAddress = opaqueCaptureAddress_;
+ return *this;
+ }
- struct BufferViewCreateInfo : public layout::BufferViewCreateInfo
+ operator VkBufferOpaqueCaptureAddressCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo*>( this );
+ }
+
+ operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo*>( this );
+ }
+
+ bool operator==( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+ }
+
+ bool operator!=( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
+ const void* pNext = {};
+ uint64_t opaqueCaptureAddress = {};
+ };
+ static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<BufferOpaqueCaptureAddressCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
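
BufferOpaqueCaptureAddressCreateInfo is newly added by this diff (another Vulkan 1.2 structure). It chains into BufferCreateInfo via pNext to replay a previously captured address; savedAddress below is an assumed placeholder value:

    vk::BufferOpaqueCaptureAddressCreateInfo captureInfo;
    captureInfo.setOpaqueCaptureAddress( savedAddress );
    vk::BufferCreateInfo createInfo;
    createInfo.setPNext( &captureInfo )
              .setSize( 1024 )
              .setUsage( vk::BufferUsageFlagBits::eShaderDeviceAddress );
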
+ struct BufferViewCreateInfo
{
- VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( vk::BufferViewCreateFlags flags_ = vk::BufferViewCreateFlags(),
- vk::Buffer buffer_ = vk::Buffer(),
- vk::Format format_ = vk::Format::eUndefined,
- vk::DeviceSize offset_ = 0,
- vk::DeviceSize range_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::BufferViewCreateInfo( flags_, buffer_, format_, offset_, range_ )
+ VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , buffer( buffer_ )
+ , format( format_ )
+ , offset( offset_ )
+ , range( range_ )
{}
+ VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) - offsetof( BufferViewCreateInfo, pNext ) );
+ return *this;
+ }
+
BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::BufferViewCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::BufferViewCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>(&rhs);
return *this;
}
@@ -24451,31 +24017,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- BufferViewCreateInfo & setFlags( vk::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- BufferViewCreateInfo & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- BufferViewCreateInfo & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- BufferViewCreateInfo & setOffset( vk::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- BufferViewCreateInfo & setRange( vk::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+ BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
{
range = range_;
return *this;
@@ -24507,53 +24073,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::BufferViewCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize range = {};
};
static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
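With the layout:: base gone, the fluent setters still return *this, so call sites read exactly as before. A minimal sketch, assuming `device` and a texel-buffer handle `texelBuffer` (both hypothetical):

    vk::BufferViewCreateInfo viewInfo;
    viewInfo.setBuffer( texelBuffer )
            .setFormat( vk::Format::eR32Sfloat )
            .setOffset( 0 )
            .setRange( VK_WHOLE_SIZE );
    vk::BufferView view = device.createBufferView( viewInfo );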
- namespace layout
- {
- struct CalibratedTimestampInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( vk::TimeDomainEXT timeDomain_ = vk::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT
- : timeDomain( timeDomain_ )
- {}
-
- CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCalibratedTimestampInfoEXT*>(this) = rhs;
- }
-
- CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCalibratedTimestampInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
- const void* pNext = nullptr;
- vk::TimeDomainEXT timeDomain;
- };
- static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct CalibratedTimestampInfoEXT : public layout::CalibratedTimestampInfoEXT
+ struct CalibratedTimestampInfoEXT
{
- VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( vk::TimeDomainEXT timeDomain_ = vk::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT
- : layout::CalibratedTimestampInfoEXT( timeDomain_ )
+ VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT
+ : timeDomain( timeDomain_ )
{}
+ VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & operator=( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) - offsetof( CalibratedTimestampInfoEXT, pNext ) );
+ return *this;
+ }
+
CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CalibratedTimestampInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CalibratedTimestampInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>(&rhs);
return *this;
}
@@ -24563,7 +24114,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CalibratedTimestampInfoEXT & setTimeDomain( vk::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
+ CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
{
timeDomain = timeDomain_;
return *this;
@@ -24591,53 +24142,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CalibratedTimestampInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
};
static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
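A sketch of how this struct is consumed, assuming VK_EXT_calibrated_timestamps is enabled and its entry point is reachable through the dispatcher; `device` is hypothetical:

    vk::CalibratedTimestampInfoEXT info( vk::TimeDomainEXT::eDevice );
    uint64_t timestamp    = 0;
    uint64_t maxDeviation = 0;
    // pointer-style overload: one info struct in, one timestamp out
    vk::Result result = device.getCalibratedTimestampsEXT( 1, &info, &timestamp, &maxDeviation );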
- namespace layout
+ struct CheckpointDataNV
{
- struct CheckpointDataNV
- {
- protected:
- CheckpointDataNV() VULKAN_HPP_NOEXCEPT
- {}
-
- CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCheckpointDataNV*>(this) = rhs;
- }
-
- CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCheckpointDataNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCheckpointDataNV;
- void* pNext = nullptr;
- vk::PipelineStageFlagBits stage;
- void* pCheckpointMarker;
- };
- static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "layout struct and wrapper have different size!" );
- }
-
- struct CheckpointDataNV : public layout::CheckpointDataNV
- {
- CheckpointDataNV() VULKAN_HPP_NOEXCEPT
- : layout::CheckpointDataNV()
+ CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe,
+ void* pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT
+ : stage( stage_ )
+ , pCheckpointMarker( pCheckpointMarker_ )
{}
+ VULKAN_HPP_NAMESPACE::CheckpointDataNV & operator=( VULKAN_HPP_NAMESPACE::CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) - offsetof( CheckpointDataNV, pNext ) );
+ return *this;
+ }
+
CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CheckpointDataNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CheckpointDataNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>(&rhs);
return *this;
}
@@ -24655,7 +24189,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( stage, rhs.stage )
+ && ( stage == rhs.stage )
&& ( pCheckpointMarker == rhs.pCheckpointMarker );
}
@@ -24664,15 +24198,18 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CheckpointDataNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
+ void* pCheckpointMarker = {};
};
static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
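CheckpointDataNV is a read-back struct; a sketch of the intended flow, assuming VK_NV_device_diagnostic_checkpoints is enabled and `queue` is a hypothetical handle:

    // After a VK_ERROR_DEVICE_LOST, query the last markers that executed.
    std::vector<vk::CheckpointDataNV> checkpoints = queue.getCheckpointDataNV();
    for ( vk::CheckpointDataNV const & cp : checkpoints )
    {
      // cp.stage is the pipeline stage that reached the checkpoint;
      // cp.pCheckpointMarker is the value recorded via CommandBuffer::setCheckpointNV.
    }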
union ClearColorValue
{
- ClearColorValue( const std::array<float,4>& float32_ = { { 0 } } )
+ ClearColorValue( const std::array<float,4>& float32_ = {} )
{
memcpy( float32, float32_.data(), 4 * sizeof( float ) );
}
@@ -24704,6 +24241,13 @@ namespace VULKAN_HPP_NAMESPACE
memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
return *this;
}
+
+ VULKAN_HPP_NAMESPACE::ClearColorValue & operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
+ return *this;
+ }
+
operator VkClearColorValue const&() const
{
return *reinterpret_cast<const VkClearColorValue*>(this);
@@ -24721,20 +24265,20 @@ namespace VULKAN_HPP_NAMESPACE
struct ClearDepthStencilValue
{
- VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = 0,
- uint32_t stencil_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {},
+ uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
: depth( depth_ )
, stencil( stencil_ )
{}
ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkClearDepthStencilValue*>(this) = rhs;
+ *this = rhs;
}
ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkClearDepthStencilValue*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>(&rhs);
return *this;
}
@@ -24772,35 +24316,42 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- float depth;
- uint32_t stencil;
+ float depth = {};
+ uint32_t stencil = {};
};
static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
union ClearValue
{
- ClearValue( vk::ClearColorValue color_ = vk::ClearColorValue() )
+ ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
{
color = color_;
}
- ClearValue( vk::ClearDepthStencilValue depthStencil_ )
+ ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
{
depthStencil = depthStencil_;
}
- ClearValue & setColor( vk::ClearColorValue color_ ) VULKAN_HPP_NOEXCEPT
+ ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue color_ ) VULKAN_HPP_NOEXCEPT
{
color = color_;
return *this;
}
- ClearValue & setDepthStencil( vk::ClearDepthStencilValue depthStencil_ ) VULKAN_HPP_NOEXCEPT
+ ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) VULKAN_HPP_NOEXCEPT
{
depthStencil = depthStencil_;
return *this;
}
+
+ VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+ return *this;
+ }
+
operator VkClearValue const&() const
{
return *reinterpret_cast<const VkClearValue*>(this);
@@ -24812,8 +24363,8 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
- vk::ClearColorValue color;
- vk::ClearDepthStencilValue depthStencil;
+ VULKAN_HPP_NAMESPACE::ClearColorValue color;
+ VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
#else
VkClearColorValue color;
VkClearDepthStencilValue depthStencil;
@@ -24822,9 +24373,9 @@ namespace VULKAN_HPP_NAMESPACE
struct ClearAttachment
{
- ClearAttachment( vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags(),
- uint32_t colorAttachment_ = 0,
- vk::ClearValue clearValue_ = vk::ClearValue() ) VULKAN_HPP_NOEXCEPT
+ ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+ uint32_t colorAttachment_ = {},
+ VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, colorAttachment( colorAttachment_ )
, clearValue( clearValue_ )
@@ -24832,16 +24383,16 @@ namespace VULKAN_HPP_NAMESPACE
ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkClearAttachment*>(this) = rhs;
+ *this = rhs;
}
ClearAttachment& operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkClearAttachment*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>(&rhs);
return *this;
}
- ClearAttachment & setAspectMask( vk::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -24853,7 +24404,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ClearAttachment & setClearValue( vk::ClearValue clearValue_ ) VULKAN_HPP_NOEXCEPT
+ ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue clearValue_ ) VULKAN_HPP_NOEXCEPT
{
clearValue = clearValue_;
return *this;
@@ -24870,18 +24421,18 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageAspectFlags aspectMask;
- uint32_t colorAttachment;
- vk::ClearValue clearValue;
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ uint32_t colorAttachment = {};
+ VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
};
static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ClearAttachment>::value, "struct wrapper is not a standard layout!" );
struct ClearRect
{
- VULKAN_HPP_CONSTEXPR ClearRect( vk::Rect2D rect_ = vk::Rect2D(),
- uint32_t baseArrayLayer_ = 0,
- uint32_t layerCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {},
+ uint32_t baseArrayLayer_ = {},
+ uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
: rect( rect_ )
, baseArrayLayer( baseArrayLayer_ )
, layerCount( layerCount_ )
@@ -24889,16 +24440,16 @@ namespace VULKAN_HPP_NAMESPACE
ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkClearRect*>(this) = rhs;
+ *this = rhs;
}
ClearRect& operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkClearRect*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>(&rhs);
return *this;
}
- ClearRect & setRect( vk::Rect2D rect_ ) VULKAN_HPP_NOEXCEPT
+ ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ ) VULKAN_HPP_NOEXCEPT
{
rect = rect_;
return *this;
@@ -24939,18 +24490,18 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Rect2D rect;
- uint32_t baseArrayLayer;
- uint32_t layerCount;
+ VULKAN_HPP_NAMESPACE::Rect2D rect = {};
+ uint32_t baseArrayLayer = {};
+ uint32_t layerCount = {};
};
static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ClearRect>::value, "struct wrapper is not a standard layout!" );
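ClearAttachment, ClearRect, and the ClearValue/ClearColorValue unions above come together in clearAttachments; a sketch assuming a recording command buffer `cmd` inside a render pass and a framebuffer `extent` (both hypothetical):

    vk::ClearAttachment colorClear;
    colorClear.setAspectMask( vk::ImageAspectFlagBits::eColor )
              .setColorAttachment( 0 )
              .setClearValue( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );

    vk::ClearRect region( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ),
                          0 /*baseArrayLayer*/, 1 /*layerCount*/ );
    cmd.clearAttachments( colorClear, region );  // must be recorded inside a render pass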
struct IndirectCommandsTokenNVX
{
- VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( vk::IndirectCommandsTokenTypeNVX tokenType_ = vk::IndirectCommandsTokenTypeNVX::ePipeline,
- vk::Buffer buffer_ = vk::Buffer(),
- vk::DeviceSize offset_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline,
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
: tokenType( tokenType_ )
, buffer( buffer_ )
, offset( offset_ )
@@ -24958,28 +24509,28 @@ namespace VULKAN_HPP_NAMESPACE
IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkIndirectCommandsTokenNVX*>(this) = rhs;
+ *this = rhs;
}
IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkIndirectCommandsTokenNVX*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX const *>(&rhs);
return *this;
}
- IndirectCommandsTokenNVX & setTokenType( vk::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
{
tokenType = tokenType_;
return *this;
}
- IndirectCommandsTokenNVX & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsTokenNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- IndirectCommandsTokenNVX & setOffset( vk::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsTokenNVX & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
@@ -25008,90 +24559,51 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::IndirectCommandsTokenTypeNVX tokenType;
- vk::Buffer buffer;
- vk::DeviceSize offset;
+ VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline;
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
};
static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<IndirectCommandsTokenNVX>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct CmdProcessCommandsInfoNVX
- {
- protected:
- VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( vk::ObjectTableNVX objectTable_ = vk::ObjectTableNVX(),
- vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ = vk::IndirectCommandsLayoutNVX(),
- uint32_t indirectCommandsTokenCount_ = 0,
- const vk::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr,
- uint32_t maxSequencesCount_ = 0,
- vk::CommandBuffer targetCommandBuffer_ = vk::CommandBuffer(),
- vk::Buffer sequencesCountBuffer_ = vk::Buffer(),
- vk::DeviceSize sequencesCountOffset_ = 0,
- vk::Buffer sequencesIndexBuffer_ = vk::Buffer(),
- vk::DeviceSize sequencesIndexOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : objectTable( objectTable_ )
- , indirectCommandsLayout( indirectCommandsLayout_ )
- , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
- , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
- , maxSequencesCount( maxSequencesCount_ )
- , targetCommandBuffer( targetCommandBuffer_ )
- , sequencesCountBuffer( sequencesCountBuffer_ )
- , sequencesCountOffset( sequencesCountOffset_ )
- , sequencesIndexBuffer( sequencesIndexBuffer_ )
- , sequencesIndexOffset( sequencesIndexOffset_ )
- {}
-
- CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCmdProcessCommandsInfoNVX*>(this) = rhs;
- }
-
- CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCmdProcessCommandsInfoNVX*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCmdProcessCommandsInfoNVX;
- const void* pNext = nullptr;
- vk::ObjectTableNVX objectTable;
- vk::IndirectCommandsLayoutNVX indirectCommandsLayout;
- uint32_t indirectCommandsTokenCount;
- const vk::IndirectCommandsTokenNVX* pIndirectCommandsTokens;
- uint32_t maxSequencesCount;
- vk::CommandBuffer targetCommandBuffer;
- vk::Buffer sequencesCountBuffer;
- vk::DeviceSize sequencesCountOffset;
- vk::Buffer sequencesIndexBuffer;
- vk::DeviceSize sequencesIndexOffset;
- };
- static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "layout struct and wrapper have different size!" );
- }
-
- struct CmdProcessCommandsInfoNVX : public layout::CmdProcessCommandsInfoNVX
+ struct CmdProcessCommandsInfoNVX
{
- VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( vk::ObjectTableNVX objectTable_ = vk::ObjectTableNVX(),
- vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ = vk::IndirectCommandsLayoutNVX(),
- uint32_t indirectCommandsTokenCount_ = 0,
- const vk::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr,
- uint32_t maxSequencesCount_ = 0,
- vk::CommandBuffer targetCommandBuffer_ = vk::CommandBuffer(),
- vk::Buffer sequencesCountBuffer_ = vk::Buffer(),
- vk::DeviceSize sequencesCountOffset_ = 0,
- vk::Buffer sequencesIndexBuffer_ = vk::Buffer(),
- vk::DeviceSize sequencesIndexOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::CmdProcessCommandsInfoNVX( objectTable_, indirectCommandsLayout_, indirectCommandsTokenCount_, pIndirectCommandsTokens_, maxSequencesCount_, targetCommandBuffer_, sequencesCountBuffer_, sequencesCountOffset_, sequencesIndexBuffer_, sequencesIndexOffset_ )
+ VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {},
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {},
+ uint32_t indirectCommandsTokenCount_ = {},
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = {},
+ uint32_t maxSequencesCount_ = {},
+ VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : objectTable( objectTable_ )
+ , indirectCommandsLayout( indirectCommandsLayout_ )
+ , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
+ , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
+ , maxSequencesCount( maxSequencesCount_ )
+ , targetCommandBuffer( targetCommandBuffer_ )
+ , sequencesCountBuffer( sequencesCountBuffer_ )
+ , sequencesCountOffset( sequencesCountOffset_ )
+ , sequencesIndexBuffer( sequencesIndexBuffer_ )
+ , sequencesIndexOffset( sequencesIndexOffset_ )
{}
+ VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX & operator=( VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX ) - offsetof( CmdProcessCommandsInfoNVX, pNext ) );
+ return *this;
+ }
+
CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CmdProcessCommandsInfoNVX( rhs )
- {}
+ {
+ *this = rhs;
+ }
CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CmdProcessCommandsInfoNVX::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX const *>(&rhs);
return *this;
}
@@ -25101,13 +24613,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CmdProcessCommandsInfoNVX & setObjectTable( vk::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
+ CmdProcessCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
{
objectTable = objectTable_;
return *this;
}
- CmdProcessCommandsInfoNVX & setIndirectCommandsLayout( vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+ CmdProcessCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
indirectCommandsLayout = indirectCommandsLayout_;
return *this;
@@ -25119,7 +24631,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CmdProcessCommandsInfoNVX & setPIndirectCommandsTokens( const vk::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) VULKAN_HPP_NOEXCEPT
+ CmdProcessCommandsInfoNVX & setPIndirectCommandsTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) VULKAN_HPP_NOEXCEPT
{
pIndirectCommandsTokens = pIndirectCommandsTokens_;
return *this;
@@ -25131,31 +24643,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CmdProcessCommandsInfoNVX & setTargetCommandBuffer( vk::CommandBuffer targetCommandBuffer_ ) VULKAN_HPP_NOEXCEPT
+ CmdProcessCommandsInfoNVX & setTargetCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ ) VULKAN_HPP_NOEXCEPT
{
targetCommandBuffer = targetCommandBuffer_;
return *this;
}
- CmdProcessCommandsInfoNVX & setSequencesCountBuffer( vk::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+ CmdProcessCommandsInfoNVX & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sequencesCountBuffer = sequencesCountBuffer_;
return *this;
}
- CmdProcessCommandsInfoNVX & setSequencesCountOffset( vk::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+ CmdProcessCommandsInfoNVX & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
{
sequencesCountOffset = sequencesCountOffset_;
return *this;
}
- CmdProcessCommandsInfoNVX & setSequencesIndexBuffer( vk::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+ CmdProcessCommandsInfoNVX & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sequencesIndexBuffer = sequencesIndexBuffer_;
return *this;
}
- CmdProcessCommandsInfoNVX & setSequencesIndexOffset( vk::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+ CmdProcessCommandsInfoNVX & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
{
sequencesIndexOffset = sequencesIndexOffset_;
return *this;
@@ -25192,61 +24704,47 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CmdProcessCommandsInfoNVX::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdProcessCommandsInfoNVX;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {};
+ uint32_t indirectCommandsTokenCount = {};
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens = {};
+ uint32_t maxSequencesCount = {};
+ VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer = {};
+ VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
+ VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
};
static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CmdProcessCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
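For orientation only, since VK_NVX_device_generated_commands was later superseded: a sketch combining IndirectCommandsTokenNVX with this struct, where `tokenBuffer`, `objectTable`, `commandsLayout`, `maxSequences`, and `cmd` are all hypothetical handles set up through the extension:

    vk::IndirectCommandsTokenNVX drawToken( vk::IndirectCommandsTokenTypeNVX::eDrawIndexed,
                                            tokenBuffer, 0 /*offset*/ );

    vk::CmdProcessCommandsInfoNVX processInfo;
    processInfo.setObjectTable( objectTable )
               .setIndirectCommandsLayout( commandsLayout )
               .setIndirectCommandsTokenCount( 1 )
               .setPIndirectCommandsTokens( &drawToken )
               .setMaxSequencesCount( maxSequences );
    cmd.processCommandsNVX( processInfo );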
- namespace layout
- {
- struct CmdReserveSpaceForCommandsInfoNVX
- {
- protected:
- VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( vk::ObjectTableNVX objectTable_ = vk::ObjectTableNVX(),
- vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ = vk::IndirectCommandsLayoutNVX(),
- uint32_t maxSequencesCount_ = 0 ) VULKAN_HPP_NOEXCEPT
- : objectTable( objectTable_ )
- , indirectCommandsLayout( indirectCommandsLayout_ )
- , maxSequencesCount( maxSequencesCount_ )
- {}
-
- CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX*>(this) = rhs;
- }
-
- CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX;
- const void* pNext = nullptr;
- vk::ObjectTableNVX objectTable;
- vk::IndirectCommandsLayoutNVX indirectCommandsLayout;
- uint32_t maxSequencesCount;
- };
- static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "layout struct and wrapper have different size!" );
- }
-
- struct CmdReserveSpaceForCommandsInfoNVX : public layout::CmdReserveSpaceForCommandsInfoNVX
+ struct CmdReserveSpaceForCommandsInfoNVX
{
- VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( vk::ObjectTableNVX objectTable_ = vk::ObjectTableNVX(),
- vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ = vk::IndirectCommandsLayoutNVX(),
- uint32_t maxSequencesCount_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::CmdReserveSpaceForCommandsInfoNVX( objectTable_, indirectCommandsLayout_, maxSequencesCount_ )
+ VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {},
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {},
+ uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ : objectTable( objectTable_ )
+ , indirectCommandsLayout( indirectCommandsLayout_ )
+ , maxSequencesCount( maxSequencesCount_ )
{}
+ VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX & operator=( VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX ) - offsetof( CmdReserveSpaceForCommandsInfoNVX, pNext ) );
+ return *this;
+ }
+
CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CmdReserveSpaceForCommandsInfoNVX( rhs )
- {}
+ {
+ *this = rhs;
+ }
CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CmdReserveSpaceForCommandsInfoNVX::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX const *>(&rhs);
return *this;
}
@@ -25256,13 +24754,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CmdReserveSpaceForCommandsInfoNVX & setObjectTable( vk::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
+ CmdReserveSpaceForCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
{
objectTable = objectTable_;
return *this;
}
- CmdReserveSpaceForCommandsInfoNVX & setIndirectCommandsLayout( vk::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+ CmdReserveSpaceForCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
{
indirectCommandsLayout = indirectCommandsLayout_;
return *this;
@@ -25298,17 +24796,21 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CmdReserveSpaceForCommandsInfoNVX::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {};
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {};
+ uint32_t maxSequencesCount = {};
};
static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CmdReserveSpaceForCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
struct CoarseSampleLocationNV
{
- VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = 0,
- uint32_t pixelY_ = 0,
- uint32_t sample_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {},
+ uint32_t pixelY_ = {},
+ uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
: pixelX( pixelX_ )
, pixelY( pixelY_ )
, sample( sample_ )
@@ -25316,12 +24818,12 @@ namespace VULKAN_HPP_NAMESPACE
CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkCoarseSampleLocationNV*>(this) = rhs;
+ *this = rhs;
}
CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkCoarseSampleLocationNV*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>(&rhs);
return *this;
}
@@ -25366,19 +24868,19 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t pixelX;
- uint32_t pixelY;
- uint32_t sample;
+ uint32_t pixelX = {};
+ uint32_t pixelY = {};
+ uint32_t sample = {};
};
static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
struct CoarseSampleOrderCustomNV
{
- VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( vk::ShadingRatePaletteEntryNV shadingRate_ = vk::ShadingRatePaletteEntryNV::eNoInvocations,
- uint32_t sampleCount_ = 0,
- uint32_t sampleLocationCount_ = 0,
- const vk::CoarseSampleLocationNV* pSampleLocations_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations,
+ uint32_t sampleCount_ = {},
+ uint32_t sampleLocationCount_ = {},
+ const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
: shadingRate( shadingRate_ )
, sampleCount( sampleCount_ )
, sampleLocationCount( sampleLocationCount_ )
@@ -25387,16 +24889,16 @@ namespace VULKAN_HPP_NAMESPACE
CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkCoarseSampleOrderCustomNV*>(this) = rhs;
+ *this = rhs;
}
CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkCoarseSampleOrderCustomNV*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>(&rhs);
return *this;
}
- CoarseSampleOrderCustomNV & setShadingRate( vk::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
{
shadingRate = shadingRate_;
return *this;
@@ -25414,7 +24916,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CoarseSampleOrderCustomNV & setPSampleLocations( const vk::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pSampleLocations = pSampleLocations_;
return *this;
@@ -25444,63 +24946,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ShadingRatePaletteEntryNV shadingRate;
- uint32_t sampleCount;
- uint32_t sampleLocationCount;
- const vk::CoarseSampleLocationNV* pSampleLocations;
+ VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
+ uint32_t sampleCount = {};
+ uint32_t sampleLocationCount = {};
+ const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations = {};
};
static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
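A sketch of a custom coarse sample order, assuming VK_NV_shading_rate_image is enabled and `cmd` is a hypothetical recording command buffer; the location count must equal sampleCount times the fragment footprint (here 1 x 2 x 2 = 4):

    // pixelX, pixelY, sample for each covered pixel of the 2x2 fragment
    vk::CoarseSampleLocationNV locations[4] = { { 0, 0, 0 }, { 1, 0, 0 }, { 0, 1, 0 }, { 1, 1, 0 } };

    vk::CoarseSampleOrderCustomNV order( vk::ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels,
                                         1 /*sampleCount*/, 4 /*sampleLocationCount*/, locations );
    cmd.setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV::eCustom, order );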
- namespace layout
+ struct CommandBufferAllocateInfo
{
- struct CommandBufferAllocateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( vk::CommandPool commandPool_ = vk::CommandPool(),
- vk::CommandBufferLevel level_ = vk::CommandBufferLevel::ePrimary,
- uint32_t commandBufferCount_ = 0 ) VULKAN_HPP_NOEXCEPT
- : commandPool( commandPool_ )
- , level( level_ )
- , commandBufferCount( commandBufferCount_ )
- {}
-
- CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandBufferAllocateInfo*>(this) = rhs;
- }
-
- CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandBufferAllocateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCommandBufferAllocateInfo;
- const void* pNext = nullptr;
- vk::CommandPool commandPool;
- vk::CommandBufferLevel level;
- uint32_t commandBufferCount;
- };
- static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct CommandBufferAllocateInfo : public layout::CommandBufferAllocateInfo
- {
- VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( vk::CommandPool commandPool_ = vk::CommandPool(),
- vk::CommandBufferLevel level_ = vk::CommandBufferLevel::ePrimary,
- uint32_t commandBufferCount_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::CommandBufferAllocateInfo( commandPool_, level_, commandBufferCount_ )
+ VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {},
+ VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary,
+ uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ : commandPool( commandPool_ )
+ , level( level_ )
+ , commandBufferCount( commandBufferCount_ )
{}
+ VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) - offsetof( CommandBufferAllocateInfo, pNext ) );
+ return *this;
+ }
+
CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CommandBufferAllocateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CommandBufferAllocateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>(&rhs);
return *this;
}
@@ -25510,13 +24987,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CommandBufferAllocateInfo & setCommandPool( vk::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
{
commandPool = commandPool_;
return *this;
}
- CommandBufferAllocateInfo & setLevel( vk::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
{
level = level_;
return *this;
@@ -25552,73 +25029,46 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CommandBufferAllocateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
+ VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
+ uint32_t commandBufferCount = {};
};
static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
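Typical use of this struct, assuming `device` and an existing `pool` (both hypothetical):

    vk::CommandBufferAllocateInfo allocInfo( pool, vk::CommandBufferLevel::ePrimary,
                                             2 /*commandBufferCount*/ );
    std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocInfo );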
- namespace layout
+ struct CommandBufferInheritanceInfo
{
- struct CommandBufferInheritanceInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( vk::RenderPass renderPass_ = vk::RenderPass(),
- uint32_t subpass_ = 0,
- vk::Framebuffer framebuffer_ = vk::Framebuffer(),
- vk::Bool32 occlusionQueryEnable_ = 0,
- vk::QueryControlFlags queryFlags_ = vk::QueryControlFlags(),
- vk::QueryPipelineStatisticFlags pipelineStatistics_ = vk::QueryPipelineStatisticFlags() ) VULKAN_HPP_NOEXCEPT
- : renderPass( renderPass_ )
- , subpass( subpass_ )
- , framebuffer( framebuffer_ )
- , occlusionQueryEnable( occlusionQueryEnable_ )
- , queryFlags( queryFlags_ )
- , pipelineStatistics( pipelineStatistics_ )
- {}
-
- CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandBufferInheritanceInfo*>(this) = rhs;
- }
-
- CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandBufferInheritanceInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
- const void* pNext = nullptr;
- vk::RenderPass renderPass;
- uint32_t subpass;
- vk::Framebuffer framebuffer;
- vk::Bool32 occlusionQueryEnable;
- vk::QueryControlFlags queryFlags;
- vk::QueryPipelineStatisticFlags pipelineStatistics;
- };
- static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct CommandBufferInheritanceInfo : public layout::CommandBufferInheritanceInfo
- {
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( vk::RenderPass renderPass_ = vk::RenderPass(),
- uint32_t subpass_ = 0,
- vk::Framebuffer framebuffer_ = vk::Framebuffer(),
- vk::Bool32 occlusionQueryEnable_ = 0,
- vk::QueryControlFlags queryFlags_ = vk::QueryControlFlags(),
- vk::QueryPipelineStatisticFlags pipelineStatistics_ = vk::QueryPipelineStatisticFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::CommandBufferInheritanceInfo( renderPass_, subpass_, framebuffer_, occlusionQueryEnable_, queryFlags_, pipelineStatistics_ )
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+ uint32_t subpass_ = {},
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {},
+ VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {},
+ VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
+ : renderPass( renderPass_ )
+ , subpass( subpass_ )
+ , framebuffer( framebuffer_ )
+ , occlusionQueryEnable( occlusionQueryEnable_ )
+ , queryFlags( queryFlags_ )
+ , pipelineStatistics( pipelineStatistics_ )
{}
+ VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) - offsetof( CommandBufferInheritanceInfo, pNext ) );
+ return *this;
+ }
+
CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CommandBufferInheritanceInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CommandBufferInheritanceInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>(&rhs);
return *this;
}
@@ -25628,7 +25078,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CommandBufferInheritanceInfo & setRenderPass( vk::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
@@ -25640,25 +25090,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CommandBufferInheritanceInfo & setFramebuffer( vk::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
{
framebuffer = framebuffer_;
return *this;
}
- CommandBufferInheritanceInfo & setOcclusionQueryEnable( vk::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
{
occlusionQueryEnable = occlusionQueryEnable_;
return *this;
}
- CommandBufferInheritanceInfo & setQueryFlags( vk::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
{
queryFlags = queryFlags_;
return *this;
}
- CommandBufferInheritanceInfo & setPipelineStatistics( vk::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatistics = pipelineStatistics_;
return *this;
@@ -25691,57 +25141,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CommandBufferInheritanceInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ uint32_t subpass = {};
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+ VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
+ VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
+ VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
};
static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct CommandBufferBeginInfo
{
- struct CommandBufferBeginInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( vk::CommandBufferUsageFlags flags_ = vk::CommandBufferUsageFlags(),
- const vk::CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pInheritanceInfo( pInheritanceInfo_ )
- {}
-
- CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandBufferBeginInfo*>(this) = rhs;
- }
-
- CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandBufferBeginInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCommandBufferBeginInfo;
- const void* pNext = nullptr;
- vk::CommandBufferUsageFlags flags;
- const vk::CommandBufferInheritanceInfo* pInheritanceInfo;
- };
- static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct CommandBufferBeginInfo : public layout::CommandBufferBeginInfo
- {
- VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( vk::CommandBufferUsageFlags flags_ = vk::CommandBufferUsageFlags(),
- const vk::CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::CommandBufferBeginInfo( flags_, pInheritanceInfo_ )
+ VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {},
+ const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , pInheritanceInfo( pInheritanceInfo_ )
{}
+ VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & operator=( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) - offsetof( CommandBufferBeginInfo, pNext ) );
+ return *this;
+ }
+
CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CommandBufferBeginInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CommandBufferBeginInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>(&rhs);
return *this;
}
@@ -25751,13 +25185,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CommandBufferBeginInfo & setFlags( vk::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- CommandBufferBeginInfo & setPInheritanceInfo( const vk::CommandBufferInheritanceInfo* pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
{
pInheritanceInfo = pInheritanceInfo_;
return *this;
@@ -25786,53 +25220,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CommandBufferBeginInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
+ const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo = {};
};
static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
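The two structs above pair up when beginning a secondary command buffer; a sketch assuming hypothetical `renderPass`, `framebuffer`, and `secondaryCmd` handles:

    vk::CommandBufferInheritanceInfo inheritance;
    inheritance.setRenderPass( renderPass )
               .setSubpass( 0 )
               .setFramebuffer( framebuffer );

    vk::CommandBufferBeginInfo beginInfo;
    beginInfo.setFlags( vk::CommandBufferUsageFlagBits::eRenderPassContinue )
             .setPInheritanceInfo( &inheritance );
    secondaryCmd.begin( beginInfo );  // secondary buffer executes inside the given render pass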
- namespace layout
- {
- struct CommandBufferInheritanceConditionalRenderingInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( vk::Bool32 conditionalRenderingEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : conditionalRenderingEnable( conditionalRenderingEnable_ )
- {}
-
- CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>(this) = rhs;
- }
-
- CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
- const void* pNext = nullptr;
- vk::Bool32 conditionalRenderingEnable;
- };
- static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct CommandBufferInheritanceConditionalRenderingInfoEXT : public layout::CommandBufferInheritanceConditionalRenderingInfoEXT
+ struct CommandBufferInheritanceConditionalRenderingInfoEXT
{
- VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( vk::Bool32 conditionalRenderingEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::CommandBufferInheritanceConditionalRenderingInfoEXT( conditionalRenderingEnable_ )
+ VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+ : conditionalRenderingEnable( conditionalRenderingEnable_ )
{}
+ VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) - offsetof( CommandBufferInheritanceConditionalRenderingInfoEXT, pNext ) );
+ return *this;
+ }
+
CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CommandBufferInheritanceConditionalRenderingInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CommandBufferInheritanceConditionalRenderingInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>(&rhs);
return *this;
}
@@ -25842,7 +25258,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( vk::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
+ CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
{
conditionalRenderingEnable = conditionalRenderingEnable_;
return *this;
@@ -25870,57 +25286,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CommandBufferInheritanceConditionalRenderingInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
};
static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
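This struct extends CommandBufferInheritanceInfo via its pNext chain; a sketch, assuming VK_EXT_conditional_rendering is enabled:

    vk::CommandBufferInheritanceConditionalRenderingInfoEXT conditional( VK_TRUE );

    vk::CommandBufferInheritanceInfo inheritance;
    inheritance.setPNext( &conditional );  // secondary buffer inherits the conditional rendering state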
- namespace layout
+ struct CommandPoolCreateInfo
{
- struct CommandPoolCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( vk::CommandPoolCreateFlags flags_ = vk::CommandPoolCreateFlags(),
- uint32_t queueFamilyIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queueFamilyIndex( queueFamilyIndex_ )
- {}
-
- CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandPoolCreateInfo*>(this) = rhs;
- }
-
- CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCommandPoolCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCommandPoolCreateInfo;
- const void* pNext = nullptr;
- vk::CommandPoolCreateFlags flags;
- uint32_t queueFamilyIndex;
- };
- static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct CommandPoolCreateInfo : public layout::CommandPoolCreateInfo
- {
- VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( vk::CommandPoolCreateFlags flags_ = vk::CommandPoolCreateFlags(),
- uint32_t queueFamilyIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::CommandPoolCreateInfo( flags_, queueFamilyIndex_ )
+ VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {},
+ uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) - offsetof( CommandPoolCreateInfo, pNext ) );
+ return *this;
+ }
+
CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CommandPoolCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CommandPoolCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>(&rhs);
return *this;
}
@@ -25930,7 +25325,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CommandPoolCreateInfo & setFlags( vk::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -25965,17 +25360,20 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CommandPoolCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
+ uint32_t queueFamilyIndex = {};
};
static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
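Typical use, assuming `device` and a hypothetical `graphicsQueueFamilyIndex` queried earlier:

    vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
                                        graphicsQueueFamilyIndex );
    vk::CommandPool pool = device.createCommandPool( poolInfo );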
struct SpecializationMapEntry
{
- VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = 0,
- uint32_t offset_ = 0,
- size_t size_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {},
+ uint32_t offset_ = {},
+ size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
: constantID( constantID_ )
, offset( offset_ )
, size( size_ )
@@ -25983,12 +25381,12 @@ namespace VULKAN_HPP_NAMESPACE
SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSpecializationMapEntry*>(this) = rhs;
+ *this = rhs;
}
SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSpecializationMapEntry*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>(&rhs);
return *this;
}
@@ -26033,19 +25431,19 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t constantID;
- uint32_t offset;
- size_t size;
+ uint32_t constantID = {};
+ uint32_t offset = {};
+ size_t size = {};
};
static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
struct SpecializationInfo
{
- VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = 0,
- const vk::SpecializationMapEntry* pMapEntries_ = nullptr,
- size_t dataSize_ = 0,
- const void* pData_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {},
+ size_t dataSize_ = {},
+ const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
: mapEntryCount( mapEntryCount_ )
, pMapEntries( pMapEntries_ )
, dataSize( dataSize_ )
@@ -26054,12 +25452,12 @@ namespace VULKAN_HPP_NAMESPACE
SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSpecializationInfo*>(this) = rhs;
+ *this = rhs;
}
SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSpecializationInfo*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>(&rhs);
return *this;
}
@@ -26069,7 +25467,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SpecializationInfo & setPMapEntries( const vk::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+ SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
{
pMapEntries = pMapEntries_;
return *this;
@@ -26111,71 +25509,42 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t mapEntryCount;
- const vk::SpecializationMapEntry* pMapEntries;
- size_t dataSize;
- const void* pData;
+ uint32_t mapEntryCount = {};
+ const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
+ size_t dataSize = {};
+ const void* pData = {};
};
static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
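  // How the two specialization structs plug together; a sketch for a single
  // float specialization constant (constant_id = 0 in the shader), all names
  // illustrative:
  //
  //   float constantValue = 4.0f;
  //   vk::SpecializationMapEntry entry( /*constantID*/ 0, /*offset*/ 0, sizeof( float ) );
  //   vk::SpecializationInfo specInfo( 1, &entry, sizeof( constantValue ), &constantValue );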
- namespace layout
- {
- struct PipelineShaderStageCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags flags_ = vk::PipelineShaderStageCreateFlags(),
- vk::ShaderStageFlagBits stage_ = vk::ShaderStageFlagBits::eVertex,
- vk::ShaderModule module_ = vk::ShaderModule(),
- const char* pName_ = nullptr,
- const vk::SpecializationInfo* pSpecializationInfo_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , stage( stage_ )
- , module( module_ )
- , pName( pName_ )
- , pSpecializationInfo( pSpecializationInfo_ )
- {}
-
- PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineShaderStageCreateInfo*>(this) = rhs;
- }
-
- PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineShaderStageCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineShaderStageCreateFlags flags;
- vk::ShaderStageFlagBits stage;
- vk::ShaderModule module;
- const char* pName;
- const vk::SpecializationInfo* pSpecializationInfo;
- };
- static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineShaderStageCreateInfo : public layout::PipelineShaderStageCreateInfo
+ struct PipelineShaderStageCreateInfo
{
- VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( vk::PipelineShaderStageCreateFlags flags_ = vk::PipelineShaderStageCreateFlags(),
- vk::ShaderStageFlagBits stage_ = vk::ShaderStageFlagBits::eVertex,
- vk::ShaderModule module_ = vk::ShaderModule(),
- const char* pName_ = nullptr,
- const vk::SpecializationInfo* pSpecializationInfo_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineShaderStageCreateInfo( flags_, stage_, module_, pName_, pSpecializationInfo_ )
+ VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
+ VULKAN_HPP_NAMESPACE::ShaderModule module_ = {},
+ const char* pName_ = {},
+ const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , stage( stage_ )
+ , module( module_ )
+ , pName( pName_ )
+ , pSpecializationInfo( pSpecializationInfo_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) - offsetof( PipelineShaderStageCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineShaderStageCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineShaderStageCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>(&rhs);
return *this;
}
@@ -26185,19 +25554,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineShaderStageCreateInfo & setFlags( vk::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineShaderStageCreateInfo & setStage( vk::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
+ PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
- PipelineShaderStageCreateInfo & setModule( vk::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
+ PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
{
module = module_;
return *this;
@@ -26209,7 +25578,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineShaderStageCreateInfo & setPSpecializationInfo( const vk::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+ PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pSpecializationInfo = pSpecializationInfo_;
return *this;
@@ -26230,7 +25599,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && vk::operator==( stage, rhs.stage )
+ && ( stage == rhs.stage )
&& ( module == rhs.module )
&& ( pName == rhs.pName )
&& ( pSpecializationInfo == rhs.pSpecializationInfo );
@@ -26241,69 +25610,46 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineShaderStageCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
+ VULKAN_HPP_NAMESPACE::ShaderModule module = {};
+ const char* pName = {};
+ const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
};
static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
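  // Continuing the sketch above: attach the specialization data to a compute
  // stage (assumes a valid vk::ShaderModule `computeModule`; pName must match
  // the entry point compiled into the module):
  //
  //   vk::PipelineShaderStageCreateInfo stageInfo = vk::PipelineShaderStageCreateInfo()
  //       .setStage( vk::ShaderStageFlagBits::eCompute )
  //       .setModule( computeModule )
  //       .setPName( "main" )
  //       .setPSpecializationInfo( &specInfo );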
- namespace layout
+ struct ComputePipelineCreateInfo
{
- struct ComputePipelineCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(),
- vk::PipelineShaderStageCreateInfo stage_ = vk::PipelineShaderStageCreateInfo(),
- vk::PipelineLayout layout_ = vk::PipelineLayout(),
- vk::Pipeline basePipelineHandle_ = vk::Pipeline(),
- int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , stage( stage_ )
- , layout( layout_ )
- , basePipelineHandle( basePipelineHandle_ )
- , basePipelineIndex( basePipelineIndex_ )
- {}
-
- ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkComputePipelineCreateInfo*>(this) = rhs;
- }
-
- ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkComputePipelineCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eComputePipelineCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineCreateFlags flags;
- vk::PipelineShaderStageCreateInfo stage;
- vk::PipelineLayout layout;
- vk::Pipeline basePipelineHandle;
- int32_t basePipelineIndex;
- };
- static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ComputePipelineCreateInfo : public layout::ComputePipelineCreateInfo
- {
- VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(),
- vk::PipelineShaderStageCreateInfo stage_ = vk::PipelineShaderStageCreateInfo(),
- vk::PipelineLayout layout_ = vk::PipelineLayout(),
- vk::Pipeline basePipelineHandle_ = vk::Pipeline(),
- int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ComputePipelineCreateInfo( flags_, stage_, layout_, basePipelineHandle_, basePipelineIndex_ )
+ VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , stage( stage_ )
+ , layout( layout_ )
+ , basePipelineHandle( basePipelineHandle_ )
+ , basePipelineIndex( basePipelineIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) - offsetof( ComputePipelineCreateInfo, pNext ) );
+ return *this;
+ }
+
ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ComputePipelineCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ComputePipelineCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>(&rhs);
return *this;
}
@@ -26313,25 +25659,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ComputePipelineCreateInfo & setFlags( vk::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ComputePipelineCreateInfo & setStage( vk::PipelineShaderStageCreateInfo stage_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ ) VULKAN_HPP_NOEXCEPT
{
stage = stage_;
return *this;
}
- ComputePipelineCreateInfo & setLayout( vk::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- ComputePipelineCreateInfo & setBasePipelineHandle( vk::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
@@ -26369,61 +25715,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ComputePipelineCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+ int32_t basePipelineIndex = {};
};
static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
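  // And the final step of that chain (assumes `device`, a vk::PipelineLayout
  // `pipelineLayout` and the `stageInfo` above); basePipelineIndex of -1 means
  // "no parent pipeline":
  //
  //   vk::ComputePipelineCreateInfo pipelineInfo( {}, stageInfo, pipelineLayout, nullptr, -1 );
  //   vk::Pipeline pipeline = device.createComputePipeline( nullptr, pipelineInfo );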
- namespace layout
- {
- struct ConditionalRenderingBeginInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( vk::Buffer buffer_ = vk::Buffer(),
- vk::DeviceSize offset_ = 0,
- vk::ConditionalRenderingFlagsEXT flags_ = vk::ConditionalRenderingFlagsEXT() ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- , offset( offset_ )
- , flags( flags_ )
- {}
-
- ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>(this) = rhs;
- }
-
- ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
- const void* pNext = nullptr;
- vk::Buffer buffer;
- vk::DeviceSize offset;
- vk::ConditionalRenderingFlagsEXT flags;
- };
- static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ConditionalRenderingBeginInfoEXT : public layout::ConditionalRenderingBeginInfoEXT
+ struct ConditionalRenderingBeginInfoEXT
{
- VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( vk::Buffer buffer_ = vk::Buffer(),
- vk::DeviceSize offset_ = 0,
- vk::ConditionalRenderingFlagsEXT flags_ = vk::ConditionalRenderingFlagsEXT() ) VULKAN_HPP_NOEXCEPT
- : layout::ConditionalRenderingBeginInfoEXT( buffer_, offset_, flags_ )
+ VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ )
+ , offset( offset_ )
+ , flags( flags_ )
{}
+ VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) - offsetof( ConditionalRenderingBeginInfoEXT, pNext ) );
+ return *this;
+ }
+
ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ConditionalRenderingBeginInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ConditionalRenderingBeginInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>(&rhs);
return *this;
}
@@ -26433,19 +25760,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ConditionalRenderingBeginInfoEXT & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- ConditionalRenderingBeginInfoEXT & setOffset( vk::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- ConditionalRenderingBeginInfoEXT & setFlags( vk::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -26475,70 +25802,74 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ConditionalRenderingBeginInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
};
static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
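  // Sketch (requires VK_EXT_conditional_rendering; `predicateBuffer` is a
  // hypothetical vk::Buffer holding a 32-bit predicate at byte offset 0,
  // non-zero meaning "execute", inverted here by the flag):
  //
  //   vk::ConditionalRenderingBeginInfoEXT condInfo( predicateBuffer, 0,
  //       vk::ConditionalRenderingFlagBitsEXT::eInverted );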
- struct ConformanceVersionKHR
+ struct ConformanceVersion
{
- VULKAN_HPP_CONSTEXPR ConformanceVersionKHR( uint8_t major_ = 0,
- uint8_t minor_ = 0,
- uint8_t subminor_ = 0,
- uint8_t patch_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {},
+ uint8_t minor_ = {},
+ uint8_t subminor_ = {},
+ uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT
: major( major_ )
, minor( minor_ )
, subminor( subminor_ )
, patch( patch_ )
{}
- ConformanceVersionKHR( VkConformanceVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkConformanceVersionKHR*>(this) = rhs;
+ *this = rhs;
}
- ConformanceVersionKHR& operator=( VkConformanceVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ConformanceVersion& operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkConformanceVersionKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>(&rhs);
return *this;
}
- ConformanceVersionKHR & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
+ ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
{
major = major_;
return *this;
}
- ConformanceVersionKHR & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
+ ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
{
minor = minor_;
return *this;
}
- ConformanceVersionKHR & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
+ ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
{
subminor = subminor_;
return *this;
}
- ConformanceVersionKHR & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
+ ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
{
patch = patch_;
return *this;
}
- operator VkConformanceVersionKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkConformanceVersionKHR*>( this );
+ return *reinterpret_cast<const VkConformanceVersion*>( this );
}
- operator VkConformanceVersionKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkConformanceVersionKHR*>( this );
+ return *reinterpret_cast<VkConformanceVersion*>( this );
}
- bool operator==( ConformanceVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( major == rhs.major )
&& ( minor == rhs.minor )
@@ -26546,89 +25877,54 @@ namespace VULKAN_HPP_NAMESPACE
&& ( patch == rhs.patch );
}
- bool operator!=( ConformanceVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
public:
- uint8_t major;
- uint8_t minor;
- uint8_t subminor;
- uint8_t patch;
+ uint8_t major = {};
+ uint8_t minor = {};
+ uint8_t subminor = {};
+ uint8_t patch = {};
};
- static_assert( sizeof( ConformanceVersionKHR ) == sizeof( VkConformanceVersionKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ConformanceVersionKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct CooperativeMatrixPropertiesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = 0,
- uint32_t NSize_ = 0,
- uint32_t KSize_ = 0,
- vk::ComponentTypeNV AType_ = vk::ComponentTypeNV::eFloat16,
- vk::ComponentTypeNV BType_ = vk::ComponentTypeNV::eFloat16,
- vk::ComponentTypeNV CType_ = vk::ComponentTypeNV::eFloat16,
- vk::ComponentTypeNV DType_ = vk::ComponentTypeNV::eFloat16,
- vk::ScopeNV scope_ = vk::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT
- : MSize( MSize_ )
- , NSize( NSize_ )
- , KSize( KSize_ )
- , AType( AType_ )
- , BType( BType_ )
- , CType( CType_ )
- , DType( DType_ )
- , scope( scope_ )
- {}
-
- CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>(this) = rhs;
- }
-
- CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
- void* pNext = nullptr;
- uint32_t MSize;
- uint32_t NSize;
- uint32_t KSize;
- vk::ComponentTypeNV AType;
- vk::ComponentTypeNV BType;
- vk::ComponentTypeNV CType;
- vk::ComponentTypeNV DType;
- vk::ScopeNV scope;
- };
- static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
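  // The KHR suffix disappears here because VK_KHR_driver_properties was promoted
  // to core with Vulkan 1.2; code written against the extension keeps compiling
  // through a compatibility alias along the lines of:
  //
  //   using ConformanceVersionKHR = ConformanceVersion;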
- struct CooperativeMatrixPropertiesNV : public layout::CooperativeMatrixPropertiesNV
+ struct CooperativeMatrixPropertiesNV
{
- VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = 0,
- uint32_t NSize_ = 0,
- uint32_t KSize_ = 0,
- vk::ComponentTypeNV AType_ = vk::ComponentTypeNV::eFloat16,
- vk::ComponentTypeNV BType_ = vk::ComponentTypeNV::eFloat16,
- vk::ComponentTypeNV CType_ = vk::ComponentTypeNV::eFloat16,
- vk::ComponentTypeNV DType_ = vk::ComponentTypeNV::eFloat16,
- vk::ScopeNV scope_ = vk::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT
- : layout::CooperativeMatrixPropertiesNV( MSize_, NSize_, KSize_, AType_, BType_, CType_, DType_, scope_ )
+ VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {},
+ uint32_t NSize_ = {},
+ uint32_t KSize_ = {},
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+ VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT
+ : MSize( MSize_ )
+ , NSize( NSize_ )
+ , KSize( KSize_ )
+ , AType( AType_ )
+ , BType( BType_ )
+ , CType( CType_ )
+ , DType( DType_ )
+ , scope( scope_ )
{}
+ VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) - offsetof( CooperativeMatrixPropertiesNV, pNext ) );
+ return *this;
+ }
+
CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CooperativeMatrixPropertiesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CooperativeMatrixPropertiesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>(&rhs);
return *this;
}
@@ -26656,31 +25952,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CooperativeMatrixPropertiesNV & setAType( vk::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+ CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
{
AType = AType_;
return *this;
}
- CooperativeMatrixPropertiesNV & setBType( vk::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+ CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
{
BType = BType_;
return *this;
}
- CooperativeMatrixPropertiesNV & setCType( vk::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+ CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
{
CType = CType_;
return *this;
}
- CooperativeMatrixPropertiesNV & setDType( vk::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+ CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
{
DType = DType_;
return *this;
}
- CooperativeMatrixPropertiesNV & setScope( vk::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+ CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
{
scope = scope_;
return *this;
@@ -26715,77 +26011,53 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CooperativeMatrixPropertiesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
+ void* pNext = {};
+ uint32_t MSize = {};
+ uint32_t NSize = {};
+ uint32_t KSize = {};
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+ VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+ VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
};
static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
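  // This struct is an output: the driver fills it in. A typical query (assumes
  // VK_NV_cooperative_matrix is enabled and `dldi` is a vk::DispatchLoaderDynamic
  // carrying the extension entry point):
  //
  //   std::vector<vk::CooperativeMatrixPropertiesNV> matrixProps =
  //       physicalDevice.getCooperativeMatrixPropertiesNV( dldi );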
- namespace layout
- {
- struct CopyDescriptorSet
- {
- protected:
- VULKAN_HPP_CONSTEXPR CopyDescriptorSet( vk::DescriptorSet srcSet_ = vk::DescriptorSet(),
- uint32_t srcBinding_ = 0,
- uint32_t srcArrayElement_ = 0,
- vk::DescriptorSet dstSet_ = vk::DescriptorSet(),
- uint32_t dstBinding_ = 0,
- uint32_t dstArrayElement_ = 0,
- uint32_t descriptorCount_ = 0 ) VULKAN_HPP_NOEXCEPT
- : srcSet( srcSet_ )
- , srcBinding( srcBinding_ )
- , srcArrayElement( srcArrayElement_ )
- , dstSet( dstSet_ )
- , dstBinding( dstBinding_ )
- , dstArrayElement( dstArrayElement_ )
- , descriptorCount( descriptorCount_ )
- {}
-
- CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCopyDescriptorSet*>(this) = rhs;
- }
-
- CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkCopyDescriptorSet*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eCopyDescriptorSet;
- const void* pNext = nullptr;
- vk::DescriptorSet srcSet;
- uint32_t srcBinding;
- uint32_t srcArrayElement;
- vk::DescriptorSet dstSet;
- uint32_t dstBinding;
- uint32_t dstArrayElement;
- uint32_t descriptorCount;
- };
- static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "layout struct and wrapper have different size!" );
- }
-
- struct CopyDescriptorSet : public layout::CopyDescriptorSet
- {
- VULKAN_HPP_CONSTEXPR CopyDescriptorSet( vk::DescriptorSet srcSet_ = vk::DescriptorSet(),
- uint32_t srcBinding_ = 0,
- uint32_t srcArrayElement_ = 0,
- vk::DescriptorSet dstSet_ = vk::DescriptorSet(),
- uint32_t dstBinding_ = 0,
- uint32_t dstArrayElement_ = 0,
- uint32_t descriptorCount_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::CopyDescriptorSet( srcSet_, srcBinding_, srcArrayElement_, dstSet_, dstBinding_, dstArrayElement_, descriptorCount_ )
+ struct CopyDescriptorSet
+ {
+ VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {},
+ uint32_t srcBinding_ = {},
+ uint32_t srcArrayElement_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
+ uint32_t dstBinding_ = {},
+ uint32_t dstArrayElement_ = {},
+ uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ : srcSet( srcSet_ )
+ , srcBinding( srcBinding_ )
+ , srcArrayElement( srcArrayElement_ )
+ , dstSet( dstSet_ )
+ , dstBinding( dstBinding_ )
+ , dstArrayElement( dstArrayElement_ )
+ , descriptorCount( descriptorCount_ )
{}
+ VULKAN_HPP_NAMESPACE::CopyDescriptorSet & operator=( VULKAN_HPP_NAMESPACE::CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) - offsetof( CopyDescriptorSet, pNext ) );
+ return *this;
+ }
+
CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::CopyDescriptorSet( rhs )
- {}
+ {
+ *this = rhs;
+ }
CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::CopyDescriptorSet::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>(&rhs);
return *this;
}
@@ -26795,7 +26067,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CopyDescriptorSet & setSrcSet( vk::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
{
srcSet = srcSet_;
return *this;
@@ -26813,7 +26085,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- CopyDescriptorSet & setDstSet( vk::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+ CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
{
dstSet = dstSet_;
return *this;
@@ -26865,67 +26137,48 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::CopyDescriptorSet::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
+ uint32_t srcBinding = {};
+ uint32_t srcArrayElement = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+ uint32_t dstBinding = {};
+ uint32_t dstArrayElement = {};
+ uint32_t descriptorCount = {};
};
static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
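  // Sketch: copy a single descriptor between two already-allocated sets with
  // compatible layouts (`srcSet`, `dstSet` and `device` are assumptions):
  //
  //   vk::CopyDescriptorSet copy( srcSet, /*srcBinding*/ 0, /*srcArrayElement*/ 0,
  //                               dstSet, /*dstBinding*/ 0, /*dstArrayElement*/ 0,
  //                               /*descriptorCount*/ 1 );
  //   device.updateDescriptorSets( 0, nullptr, 1, &copy );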
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct D3D12FenceSubmitInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0,
- const uint64_t* pWaitSemaphoreValues_ = nullptr,
- uint32_t signalSemaphoreValuesCount_ = 0,
- const uint64_t* pSignalSemaphoreValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
- , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
- , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
- , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
- {}
-
- D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>(this) = rhs;
- }
-
- D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
- const void* pNext = nullptr;
- uint32_t waitSemaphoreValuesCount;
- const uint64_t* pWaitSemaphoreValues;
- uint32_t signalSemaphoreValuesCount;
- const uint64_t* pSignalSemaphoreValues;
- };
- static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct D3D12FenceSubmitInfoKHR : public layout::D3D12FenceSubmitInfoKHR
+ struct D3D12FenceSubmitInfoKHR
{
- VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = 0,
- const uint64_t* pWaitSemaphoreValues_ = nullptr,
- uint32_t signalSemaphoreValuesCount_ = 0,
- const uint64_t* pSignalSemaphoreValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::D3D12FenceSubmitInfoKHR( waitSemaphoreValuesCount_, pWaitSemaphoreValues_, signalSemaphoreValuesCount_, pSignalSemaphoreValues_ )
+ VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {},
+ const uint64_t* pWaitSemaphoreValues_ = {},
+ uint32_t signalSemaphoreValuesCount_ = {},
+ const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
+ , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
+ , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
+ , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
{}
+ VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) - offsetof( D3D12FenceSubmitInfoKHR, pNext ) );
+ return *this;
+ }
+
D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::D3D12FenceSubmitInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::D3D12FenceSubmitInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>(&rhs);
return *this;
}
@@ -26984,60 +26237,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::D3D12FenceSubmitInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+ const void* pNext = {};
+ uint32_t waitSemaphoreValuesCount = {};
+ const uint64_t* pWaitSemaphoreValues = {};
+ uint32_t signalSemaphoreValuesCount = {};
+ const uint64_t* pSignalSemaphoreValues = {};
};
static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
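  // (Windows-only interop.) The struct above is consumed through the pNext chain
  // of a queue submission; a sketch, with the counts matching the SubmitInfo's
  // wait/signal semaphore counts:
  //
  //   uint64_t waitValue = 1, signalValue = 2;
  //   vk::D3D12FenceSubmitInfoKHR fenceValues( 1, &waitValue, 1, &signalValue );
  //   vk::SubmitInfo submit;
  //   submit.pNext = &fenceValues;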
- namespace layout
+ struct DebugMarkerMarkerInfoEXT
{
- struct DebugMarkerMarkerInfoEXT
+ VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = {},
+ std::array<float,4> const& color_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pMarkerName( pMarkerName_ )
+ , color{}
{
- protected:
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr,
- std::array<float,4> const& color_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
- : pMarkerName( pMarkerName_ )
- , color{}
- {
- vk::ConstExpressionArrayCopy<float,4,4>::copy( color, color_ );
- }
-
- DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>(this) = rhs;
- }
-
- DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
- const void* pNext = nullptr;
- const char* pMarkerName;
- float color[4];
- };
- static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( color, color_ );
+ }
- struct DebugMarkerMarkerInfoEXT : public layout::DebugMarkerMarkerInfoEXT
- {
- VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr,
- std::array<float,4> const& color_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
- : layout::DebugMarkerMarkerInfoEXT( pMarkerName_, color_ )
- {}
+ VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) - offsetof( DebugMarkerMarkerInfoEXT, pNext ) );
+ return *this;
+ }
DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugMarkerMarkerInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugMarkerMarkerInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>(&rhs);
return *this;
}
@@ -27082,61 +26317,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugMarkerMarkerInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+ const void* pNext = {};
+ const char* pMarkerName = {};
+ float color[4] = {};
};
static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
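  // Sketch (requires VK_EXT_debug_marker; `cmd` is a recording vk::CommandBuffer;
  // color is RGBA in [0,1]):
  //
  //   vk::DebugMarkerMarkerInfoEXT marker( "shadow pass", { { 1.0f, 0.5f, 0.0f, 1.0f } } );
  //   cmd.debugMarkerBeginEXT( marker );
  //   // ... marked commands ...
  //   cmd.debugMarkerEndEXT();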
- namespace layout
- {
- struct DebugMarkerObjectNameInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( vk::DebugReportObjectTypeEXT objectType_ = vk::DebugReportObjectTypeEXT::eUnknown,
- uint64_t object_ = 0,
- const char* pObjectName_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
- , object( object_ )
- , pObjectName( pObjectName_ )
- {}
-
- DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>(this) = rhs;
- }
-
- DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
- const void* pNext = nullptr;
- vk::DebugReportObjectTypeEXT objectType;
- uint64_t object;
- const char* pObjectName;
- };
- static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DebugMarkerObjectNameInfoEXT : public layout::DebugMarkerObjectNameInfoEXT
+ struct DebugMarkerObjectNameInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( vk::DebugReportObjectTypeEXT objectType_ = vk::DebugReportObjectTypeEXT::eUnknown,
- uint64_t object_ = 0,
- const char* pObjectName_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DebugMarkerObjectNameInfoEXT( objectType_, object_, pObjectName_ )
+ VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+ uint64_t object_ = {},
+ const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
+ : objectType( objectType_ )
+ , object( object_ )
+ , pObjectName( pObjectName_ )
{}
+ VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) - offsetof( DebugMarkerObjectNameInfoEXT, pNext ) );
+ return *this;
+ }
+
DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugMarkerObjectNameInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugMarkerObjectNameInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>(&rhs);
return *this;
}
@@ -27146,7 +26359,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugMarkerObjectNameInfoEXT & setObjectType( vk::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+ DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
@@ -27188,69 +26401,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugMarkerObjectNameInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+ uint64_t object = {};
+ const char* pObjectName = {};
};
static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DebugMarkerObjectTagInfoEXT
{
- struct DebugMarkerObjectTagInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( vk::DebugReportObjectTypeEXT objectType_ = vk::DebugReportObjectTypeEXT::eUnknown,
- uint64_t object_ = 0,
- uint64_t tagName_ = 0,
- size_t tagSize_ = 0,
- const void* pTag_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
- , object( object_ )
- , tagName( tagName_ )
- , tagSize( tagSize_ )
- , pTag( pTag_ )
- {}
-
- DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>(this) = rhs;
- }
-
- DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
- const void* pNext = nullptr;
- vk::DebugReportObjectTypeEXT objectType;
- uint64_t object;
- uint64_t tagName;
- size_t tagSize;
- const void* pTag;
- };
- static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DebugMarkerObjectTagInfoEXT : public layout::DebugMarkerObjectTagInfoEXT
- {
- VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( vk::DebugReportObjectTypeEXT objectType_ = vk::DebugReportObjectTypeEXT::eUnknown,
- uint64_t object_ = 0,
- uint64_t tagName_ = 0,
- size_t tagSize_ = 0,
- const void* pTag_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DebugMarkerObjectTagInfoEXT( objectType_, object_, tagName_, tagSize_, pTag_ )
+ VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+ uint64_t object_ = {},
+ uint64_t tagName_ = {},
+ size_t tagSize_ = {},
+ const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT
+ : objectType( objectType_ )
+ , object( object_ )
+ , tagName( tagName_ )
+ , tagSize( tagSize_ )
+ , pTag( pTag_ )
{}
+ VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) - offsetof( DebugMarkerObjectTagInfoEXT, pNext ) );
+ return *this;
+ }
+
DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugMarkerObjectTagInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugMarkerObjectTagInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>(&rhs);
return *this;
}
@@ -27260,7 +26448,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugMarkerObjectTagInfoEXT & setObjectType( vk::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+ DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
@@ -27316,61 +26504,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugMarkerObjectTagInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+ uint64_t object = {};
+ uint64_t tagName = {};
+ size_t tagSize = {};
+ const void* pTag = {};
};
static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DebugReportCallbackCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( vk::DebugReportFlagsEXT flags_ = vk::DebugReportFlagsEXT(),
- PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr,
- void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pfnCallback( pfnCallback_ )
- , pUserData( pUserData_ )
- {}
-
- DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>(this) = rhs;
- }
-
- DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
- const void* pNext = nullptr;
- vk::DebugReportFlagsEXT flags;
- PFN_vkDebugReportCallbackEXT pfnCallback;
- void* pUserData;
- };
- static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DebugReportCallbackCreateInfoEXT : public layout::DebugReportCallbackCreateInfoEXT
+ struct DebugReportCallbackCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( vk::DebugReportFlagsEXT flags_ = vk::DebugReportFlagsEXT(),
- PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr,
- void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DebugReportCallbackCreateInfoEXT( flags_, pfnCallback_, pUserData_ )
+ VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {},
+ PFN_vkDebugReportCallbackEXT pfnCallback_ = {},
+ void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , pfnCallback( pfnCallback_ )
+ , pUserData( pUserData_ )
{}
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) - offsetof( DebugReportCallbackCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugReportCallbackCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugReportCallbackCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -27380,7 +26549,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugReportCallbackCreateInfoEXT & setFlags( vk::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -27422,59 +26591,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugReportCallbackCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
+ PFN_vkDebugReportCallbackEXT pfnCallback = {};
+ void* pUserData = {};
};
static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
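  // A minimal callback matching PFN_vkDebugReportCallbackEXT (VK_EXT_debug_report);
  // returning VK_FALSE asks the loader not to abort the call that triggered it:
  //
  //   static VKAPI_ATTR VkBool32 VKAPI_CALL debugReport(
  //       VkDebugReportFlagsEXT, VkDebugReportObjectTypeEXT, uint64_t, size_t,
  //       int32_t, const char* pLayerPrefix, const char* pMessage, void* )
  //   {
  //     fprintf( stderr, "[%s] %s\n", pLayerPrefix, pMessage );
  //     return VK_FALSE;
  //   }
  //
  //   vk::DebugReportCallbackCreateInfoEXT createInfo(
  //       vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning,
  //       debugReport );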
- namespace layout
+ struct DebugUtilsLabelEXT
{
- struct DebugUtilsLabelEXT
+ VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = {},
+ std::array<float,4> const& color_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pLabelName( pLabelName_ )
+ , color{}
{
- protected:
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = nullptr,
- std::array<float,4> const& color_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
- : pLabelName( pLabelName_ )
- , color{}
- {
- vk::ConstExpressionArrayCopy<float,4,4>::copy( color, color_ );
- }
-
- DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsLabelEXT*>(this) = rhs;
- }
-
- DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsLabelEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugUtilsLabelEXT;
- const void* pNext = nullptr;
- const char* pLabelName;
- float color[4];
- };
- static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( color, color_ );
+ }
- struct DebugUtilsLabelEXT : public layout::DebugUtilsLabelEXT
- {
- VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = nullptr,
- std::array<float,4> const& color_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsLabelEXT( pLabelName_, color_ )
- {}
+ VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) - offsetof( DebugUtilsLabelEXT, pNext ) );
+ return *this;
+ }
DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsLabelEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugUtilsLabelEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>(&rhs);
return *this;
}
@@ -27519,61 +26669,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugUtilsLabelEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+ const void* pNext = {};
+ const char* pLabelName = {};
+ float color[4] = {};
};
static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
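  // Sketch (VK_EXT_debug_utils): bracket a region of a command buffer so it shows
  // up as a named group in tools like RenderDoc (`cmd` assumed recording):
  //
  //   vk::DebugUtilsLabelEXT label( "post-process", { { 0.2f, 0.6f, 1.0f, 1.0f } } );
  //   cmd.beginDebugUtilsLabelEXT( label );
  //   // ... labelled commands ...
  //   cmd.endDebugUtilsLabelEXT();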
- namespace layout
+ struct DebugUtilsObjectNameInfoEXT
{
- struct DebugUtilsObjectNameInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( vk::ObjectType objectType_ = vk::ObjectType::eUnknown,
- uint64_t objectHandle_ = 0,
- const char* pObjectName_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
- , objectHandle( objectHandle_ )
- , pObjectName( pObjectName_ )
- {}
-
- DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>(this) = rhs;
- }
-
- DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
- const void* pNext = nullptr;
- vk::ObjectType objectType;
- uint64_t objectHandle;
- const char* pObjectName;
- };
- static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DebugUtilsObjectNameInfoEXT : public layout::DebugUtilsObjectNameInfoEXT
- {
- VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( vk::ObjectType objectType_ = vk::ObjectType::eUnknown,
- uint64_t objectHandle_ = 0,
- const char* pObjectName_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsObjectNameInfoEXT( objectType_, objectHandle_, pObjectName_ )
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+ uint64_t objectHandle_ = {},
+ const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
+ : objectType( objectType_ )
+ , objectHandle( objectHandle_ )
+ , pObjectName( pObjectName_ )
{}
+ VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) - offsetof( DebugUtilsObjectNameInfoEXT, pNext ) );
+ return *this;
+ }
+
DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsObjectNameInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugUtilsObjectNameInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>(&rhs);
return *this;
}
@@ -27583,7 +26711,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugUtilsObjectNameInfoEXT & setObjectType( vk::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
@@ -27625,89 +26753,54 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugUtilsObjectNameInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+ uint64_t objectHandle = {};
+ const char* pObjectName = {};
};
static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
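  // Sketch (VK_EXT_debug_utils): give a raw handle a readable name so validation
  // messages can refer to it (`stagingBuffer` and `device` are assumptions):
  //
  //   vk::DebugUtilsObjectNameInfoEXT nameInfo( vk::ObjectType::eBuffer,
  //       uint64_t( static_cast<VkBuffer>( stagingBuffer ) ), "staging buffer" );
  //   device.setDebugUtilsObjectNameEXT( nameInfo );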
- namespace layout
- {
- struct DebugUtilsMessengerCallbackDataEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCallbackDataEXT( vk::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = vk::DebugUtilsMessengerCallbackDataFlagsEXT(),
- const char* pMessageIdName_ = nullptr,
- int32_t messageIdNumber_ = 0,
- const char* pMessage_ = nullptr,
- uint32_t queueLabelCount_ = 0,
- const vk::DebugUtilsLabelEXT* pQueueLabels_ = nullptr,
- uint32_t cmdBufLabelCount_ = 0,
- const vk::DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr,
- uint32_t objectCount_ = 0,
- const vk::DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pMessageIdName( pMessageIdName_ )
- , messageIdNumber( messageIdNumber_ )
- , pMessage( pMessage_ )
- , queueLabelCount( queueLabelCount_ )
- , pQueueLabels( pQueueLabels_ )
- , cmdBufLabelCount( cmdBufLabelCount_ )
- , pCmdBufLabels( pCmdBufLabels_ )
- , objectCount( objectCount_ )
- , pObjects( pObjects_ )
- {}
-
- DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>(this) = rhs;
- }
-
- DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
- const void* pNext = nullptr;
- vk::DebugUtilsMessengerCallbackDataFlagsEXT flags;
- const char* pMessageIdName;
- int32_t messageIdNumber;
- const char* pMessage;
- uint32_t queueLabelCount;
- const vk::DebugUtilsLabelEXT* pQueueLabels;
- uint32_t cmdBufLabelCount;
- const vk::DebugUtilsLabelEXT* pCmdBufLabels;
- uint32_t objectCount;
- const vk::DebugUtilsObjectNameInfoEXT* pObjects;
- };
- static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DebugUtilsMessengerCallbackDataEXT : public layout::DebugUtilsMessengerCallbackDataEXT
- {
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCallbackDataEXT( vk::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = vk::DebugUtilsMessengerCallbackDataFlagsEXT(),
- const char* pMessageIdName_ = nullptr,
- int32_t messageIdNumber_ = 0,
- const char* pMessage_ = nullptr,
- uint32_t queueLabelCount_ = 0,
- const vk::DebugUtilsLabelEXT* pQueueLabels_ = nullptr,
- uint32_t cmdBufLabelCount_ = 0,
- const vk::DebugUtilsLabelEXT* pCmdBufLabels_ = nullptr,
- uint32_t objectCount_ = 0,
- const vk::DebugUtilsObjectNameInfoEXT* pObjects_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsMessengerCallbackDataEXT( flags_, pMessageIdName_, messageIdNumber_, pMessage_, queueLabelCount_, pQueueLabels_, cmdBufLabelCount_, pCmdBufLabels_, objectCount_, pObjects_ )
+ struct DebugUtilsMessengerCallbackDataEXT
+ {
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {},
+ const char* pMessageIdName_ = {},
+ int32_t messageIdNumber_ = {},
+ const char* pMessage_ = {},
+ uint32_t queueLabelCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {},
+ uint32_t cmdBufLabelCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {},
+ uint32_t objectCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , pMessageIdName( pMessageIdName_ )
+ , messageIdNumber( messageIdNumber_ )
+ , pMessage( pMessage_ )
+ , queueLabelCount( queueLabelCount_ )
+ , pQueueLabels( pQueueLabels_ )
+ , cmdBufLabelCount( cmdBufLabelCount_ )
+ , pCmdBufLabels( pCmdBufLabels_ )
+ , objectCount( objectCount_ )
+ , pObjects( pObjects_ )
{}
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) - offsetof( DebugUtilsMessengerCallbackDataEXT, pNext ) );
+ return *this;
+ }
+
DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsMessengerCallbackDataEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugUtilsMessengerCallbackDataEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>(&rhs);
return *this;
}
@@ -27717,7 +26810,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugUtilsMessengerCallbackDataEXT & setFlags( vk::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -27747,7 +26840,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const vk::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
{
pQueueLabels = pQueueLabels_;
return *this;
@@ -27759,7 +26852,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const vk::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
{
pCmdBufLabels = pCmdBufLabels_;
return *this;
@@ -27771,7 +26864,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugUtilsMessengerCallbackDataEXT & setPObjects( const vk::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT
{
pObjects = pObjects_;
return *this;
@@ -27808,69 +26901,51 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugUtilsMessengerCallbackDataEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
+ const char* pMessageIdName = {};
+ int32_t messageIdNumber = {};
+ const char* pMessage = {};
+ uint32_t queueLabelCount = {};
+ const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels = {};
+ uint32_t cmdBufLabelCount = {};
+ const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {};
+ uint32_t objectCount = {};
+ const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {};
};
static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
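A hedged sketch of how the callback-data wrapper above is typically viewed inside a user callback; the function name and body are illustrative, not part of this diff, and <cstdio> is assumed. Because both static_asserts above hold, the C struct handed to the callback can be reinterpreted as the C++ type:

  VKAPI_ATTR VkBool32 VKAPI_CALL myDebugCallback(
      VkDebugUtilsMessageSeverityFlagBitsEXT /*severity*/,
      VkDebugUtilsMessageTypeFlagsEXT /*types*/,
      const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData,
      void * /*pUserData*/ )
  {
    // Same size and standard layout, so viewing the C struct through the wrapper is valid.
    auto const & data = *reinterpret_cast<const vk::DebugUtilsMessengerCallbackDataEXT *>( pCallbackData );
    printf( "[%s] %s\n", data.pMessageIdName ? data.pMessageIdName : "-", data.pMessage );
    return VK_FALSE;  // do not abort the call that triggered the message
  }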
- namespace layout
- {
- struct DebugUtilsMessengerCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( vk::DebugUtilsMessengerCreateFlagsEXT flags_ = vk::DebugUtilsMessengerCreateFlagsEXT(),
- vk::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = vk::DebugUtilsMessageSeverityFlagsEXT(),
- vk::DebugUtilsMessageTypeFlagsEXT messageType_ = vk::DebugUtilsMessageTypeFlagsEXT(),
- PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr,
- void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , messageSeverity( messageSeverity_ )
- , messageType( messageType_ )
- , pfnUserCallback( pfnUserCallback_ )
- , pUserData( pUserData_ )
- {}
-
- DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>(this) = rhs;
- }
-
- DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
- const void* pNext = nullptr;
- vk::DebugUtilsMessengerCreateFlagsEXT flags;
- vk::DebugUtilsMessageSeverityFlagsEXT messageSeverity;
- vk::DebugUtilsMessageTypeFlagsEXT messageType;
- PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback;
- void* pUserData;
- };
- static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DebugUtilsMessengerCreateInfoEXT : public layout::DebugUtilsMessengerCreateInfoEXT
+ struct DebugUtilsMessengerCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( vk::DebugUtilsMessengerCreateFlagsEXT flags_ = vk::DebugUtilsMessengerCreateFlagsEXT(),
- vk::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = vk::DebugUtilsMessageSeverityFlagsEXT(),
- vk::DebugUtilsMessageTypeFlagsEXT messageType_ = vk::DebugUtilsMessageTypeFlagsEXT(),
- PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = nullptr,
- void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsMessengerCreateInfoEXT( flags_, messageSeverity_, messageType_, pfnUserCallback_, pUserData_ )
+ VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {},
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {},
+ PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
+ void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , messageSeverity( messageSeverity_ )
+ , messageType( messageType_ )
+ , pfnUserCallback( pfnUserCallback_ )
+ , pUserData( pUserData_ )
{}
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) - offsetof( DebugUtilsMessengerCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsMessengerCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugUtilsMessengerCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -27880,19 +26955,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugUtilsMessengerCreateInfoEXT & setFlags( vk::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( vk::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
{
messageSeverity = messageSeverity_;
return *this;
}
- DebugUtilsMessengerCreateInfoEXT & setMessageType( vk::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
{
messageType = messageType_;
return *this;
@@ -27936,69 +27011,46 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugUtilsMessengerCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
+ PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
+ void* pUserData = {};
};
static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
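Continuing the sketch, the create info is typically filled with the chained setters shown above (the flag-bit enums are defined elsewhere in this header; myDebugCallback is the hypothetical callback from the previous sketch):

  vk::DebugUtilsMessengerCreateInfoEXT messengerInfo = vk::DebugUtilsMessengerCreateInfoEXT()
      .setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning
                         | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
      .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral
                     | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation )
      .setPfnUserCallback( &myDebugCallback )
      .setPUserData( nullptr );  // no per-callback state in this sketch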
- namespace layout
+ struct DebugUtilsObjectTagInfoEXT
{
- struct DebugUtilsObjectTagInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( vk::ObjectType objectType_ = vk::ObjectType::eUnknown,
- uint64_t objectHandle_ = 0,
- uint64_t tagName_ = 0,
- size_t tagSize_ = 0,
- const void* pTag_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : objectType( objectType_ )
- , objectHandle( objectHandle_ )
- , tagName( tagName_ )
- , tagSize( tagSize_ )
- , pTag( pTag_ )
- {}
-
- DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>(this) = rhs;
- }
-
- DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
- const void* pNext = nullptr;
- vk::ObjectType objectType;
- uint64_t objectHandle;
- uint64_t tagName;
- size_t tagSize;
- const void* pTag;
- };
- static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DebugUtilsObjectTagInfoEXT : public layout::DebugUtilsObjectTagInfoEXT
- {
- VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( vk::ObjectType objectType_ = vk::ObjectType::eUnknown,
- uint64_t objectHandle_ = 0,
- uint64_t tagName_ = 0,
- size_t tagSize_ = 0,
- const void* pTag_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsObjectTagInfoEXT( objectType_, objectHandle_, tagName_, tagSize_, pTag_ )
+ VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+ uint64_t objectHandle_ = {},
+ uint64_t tagName_ = {},
+ size_t tagSize_ = {},
+ const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT
+ : objectType( objectType_ )
+ , objectHandle( objectHandle_ )
+ , tagName( tagName_ )
+ , tagSize( tagSize_ )
+ , pTag( pTag_ )
{}
+ VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) - offsetof( DebugUtilsObjectTagInfoEXT, pNext ) );
+ return *this;
+ }
+
DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DebugUtilsObjectTagInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DebugUtilsObjectTagInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>(&rhs);
return *this;
}
@@ -28008,7 +27060,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DebugUtilsObjectTagInfoEXT & setObjectType( vk::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+ DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
{
objectType = objectType_;
return *this;
@@ -28064,53 +27116,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DebugUtilsObjectTagInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+ uint64_t objectHandle = {};
+ uint64_t tagName = {};
+ size_t tagSize = {};
+ const void* pTag = {};
};
static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DedicatedAllocationBufferCreateInfoNV
{
- struct DedicatedAllocationBufferCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( vk::Bool32 dedicatedAllocation_ = 0 ) VULKAN_HPP_NOEXCEPT
- : dedicatedAllocation( dedicatedAllocation_ )
- {}
-
- DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>(this) = rhs;
- }
-
- DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
- const void* pNext = nullptr;
- vk::Bool32 dedicatedAllocation;
- };
- static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct DedicatedAllocationBufferCreateInfoNV : public layout::DedicatedAllocationBufferCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( vk::Bool32 dedicatedAllocation_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DedicatedAllocationBufferCreateInfoNV( dedicatedAllocation_ )
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
+ : dedicatedAllocation( dedicatedAllocation_ )
{}
+ VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) - offsetof( DedicatedAllocationBufferCreateInfoNV, pNext ) );
+ return *this;
+ }
+
DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DedicatedAllocationBufferCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DedicatedAllocationBufferCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>(&rhs);
return *this;
}
@@ -28120,7 +27157,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( vk::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocation = dedicatedAllocation_;
return *this;
@@ -28148,53 +27185,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DedicatedAllocationBufferCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
};
static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DedicatedAllocationImageCreateInfoNV
{
- struct DedicatedAllocationImageCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( vk::Bool32 dedicatedAllocation_ = 0 ) VULKAN_HPP_NOEXCEPT
- : dedicatedAllocation( dedicatedAllocation_ )
- {}
-
- DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>(this) = rhs;
- }
-
- DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
- const void* pNext = nullptr;
- vk::Bool32 dedicatedAllocation;
- };
- static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct DedicatedAllocationImageCreateInfoNV : public layout::DedicatedAllocationImageCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( vk::Bool32 dedicatedAllocation_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DedicatedAllocationImageCreateInfoNV( dedicatedAllocation_ )
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
+ : dedicatedAllocation( dedicatedAllocation_ )
{}
+ VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) - offsetof( DedicatedAllocationImageCreateInfoNV, pNext ) );
+ return *this;
+ }
+
DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DedicatedAllocationImageCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DedicatedAllocationImageCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>(&rhs);
return *this;
}
@@ -28204,7 +27222,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( vk::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocation = dedicatedAllocation_;
return *this;
@@ -28232,57 +27250,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DedicatedAllocationImageCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
};
static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DedicatedAllocationMemoryAllocateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( vk::Image image_ = vk::Image(),
- vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- , buffer( buffer_ )
- {}
-
- DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>(this) = rhs;
- }
-
- DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
- const void* pNext = nullptr;
- vk::Image image;
- vk::Buffer buffer;
- };
- static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct DedicatedAllocationMemoryAllocateInfoNV : public layout::DedicatedAllocationMemoryAllocateInfoNV
+ struct DedicatedAllocationMemoryAllocateInfoNV
{
- VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( vk::Image image_ = vk::Image(),
- vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
- : layout::DedicatedAllocationMemoryAllocateInfoNV( image_, buffer_ )
+ VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : image( image_ )
+ , buffer( buffer_ )
{}
+ VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV & operator=( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) - offsetof( DedicatedAllocationMemoryAllocateInfoNV, pNext ) );
+ return *this;
+ }
+
DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DedicatedAllocationMemoryAllocateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DedicatedAllocationMemoryAllocateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>(&rhs);
return *this;
}
@@ -28292,13 +27289,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DedicatedAllocationMemoryAllocateInfoNV & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
- DedicatedAllocationMemoryAllocateInfoNV & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
@@ -28327,17 +27324,20 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DedicatedAllocationMemoryAllocateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
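A sketch of the pNext chaining this struct exists for: requesting a dedicated allocation for an image (vk::MemoryAllocateInfo is defined elsewhere in this header; the handle and the two values below are hypothetical and would come from the usual memory-requirements query):

  vk::Image image;                  // hypothetical image, assumed valid
  vk::DeviceSize allocationSize{};  // from getImageMemoryRequirements in real code
  uint32_t memoryTypeIndex = 0;     // chosen against the device's memory properties
  vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedInfo = vk::DedicatedAllocationMemoryAllocateInfoNV()
      .setImage( image );
  vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
      .setAllocationSize( allocationSize )
      .setMemoryTypeIndex( memoryTypeIndex )
      .setPNext( &dedicatedInfo );  // extension struct hangs off the base struct's pNext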
struct DescriptorBufferInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( vk::Buffer buffer_ = vk::Buffer(),
- vk::DeviceSize offset_ = 0,
- vk::DeviceSize range_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
: buffer( buffer_ )
, offset( offset_ )
, range( range_ )
@@ -28345,28 +27345,28 @@ namespace VULKAN_HPP_NAMESPACE
DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorBufferInfo*>(this) = rhs;
+ *this = rhs;
}
DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorBufferInfo*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>(&rhs);
return *this;
}
- DescriptorBufferInfo & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- DescriptorBufferInfo & setOffset( vk::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- DescriptorBufferInfo & setRange( vk::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
{
range = range_;
return *this;
@@ -28395,18 +27395,18 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Buffer buffer;
- vk::DeviceSize offset;
- vk::DeviceSize range;
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize range = {};
};
static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
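A one-line sketch using the constructor above (`buffer` is a hypothetical vk::Buffer; VK_WHOLE_SIZE selects the full remaining range):

  vk::Buffer buffer;  // hypothetical, assumed valid
  vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );  // offset 0, whole buffer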
struct DescriptorImageInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorImageInfo( vk::Sampler sampler_ = vk::Sampler(),
- vk::ImageView imageView_ = vk::ImageView(),
- vk::ImageLayout imageLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
+ VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
: sampler( sampler_ )
, imageView( imageView_ )
, imageLayout( imageLayout_ )
@@ -28414,28 +27414,28 @@ namespace VULKAN_HPP_NAMESPACE
DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorImageInfo*>(this) = rhs;
+ *this = rhs;
}
DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorImageInfo*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>(&rhs);
return *this;
}
- DescriptorImageInfo & setSampler( vk::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
{
sampler = sampler_;
return *this;
}
- DescriptorImageInfo & setImageView( vk::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
- DescriptorImageInfo & setImageLayout( vk::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
{
imageLayout = imageLayout_;
return *this;
@@ -28464,33 +27464,33 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Sampler sampler;
- vk::ImageView imageView;
- vk::ImageLayout imageLayout;
+ VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+ VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
struct DescriptorPoolSize
{
- VULKAN_HPP_CONSTEXPR DescriptorPoolSize( vk::DescriptorType type_ = vk::DescriptorType::eSampler,
- uint32_t descriptorCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, descriptorCount( descriptorCount_ )
{}
DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorPoolSize*>(this) = rhs;
+ *this = rhs;
}
DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorPoolSize*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>(&rhs);
return *this;
}
- DescriptorPoolSize & setType( vk::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
@@ -28524,65 +27524,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DescriptorType type;
- uint32_t descriptorCount;
+ VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ uint32_t descriptorCount = {};
};
static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DescriptorPoolCreateInfo
{
- struct DescriptorPoolCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlags flags_ = vk::DescriptorPoolCreateFlags(),
- uint32_t maxSets_ = 0,
- uint32_t poolSizeCount_ = 0,
- const vk::DescriptorPoolSize* pPoolSizes_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , maxSets( maxSets_ )
- , poolSizeCount( poolSizeCount_ )
- , pPoolSizes( pPoolSizes_ )
- {}
-
- DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorPoolCreateInfo*>(this) = rhs;
- }
-
- DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorPoolCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
- const void* pNext = nullptr;
- vk::DescriptorPoolCreateFlags flags;
- uint32_t maxSets;
- uint32_t poolSizeCount;
- const vk::DescriptorPoolSize* pPoolSizes;
- };
- static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DescriptorPoolCreateInfo : public layout::DescriptorPoolCreateInfo
- {
- VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlags flags_ = vk::DescriptorPoolCreateFlags(),
- uint32_t maxSets_ = 0,
- uint32_t poolSizeCount_ = 0,
- const vk::DescriptorPoolSize* pPoolSizes_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorPoolCreateInfo( flags_, maxSets_, poolSizeCount_, pPoolSizes_ )
+ VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {},
+ uint32_t maxSets_ = {},
+ uint32_t poolSizeCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , maxSets( maxSets_ )
+ , poolSizeCount( poolSizeCount_ )
+ , pPoolSizes( pPoolSizes_ )
{}
+ VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) - offsetof( DescriptorPoolCreateInfo, pNext ) );
+ return *this;
+ }
+
DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorPoolCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorPoolCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>(&rhs);
return *this;
}
@@ -28592,7 +27565,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorPoolCreateInfo & setFlags( vk::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -28610,7 +27583,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorPoolCreateInfo & setPPoolSizes( const vk::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
{
pPoolSizes = pPoolSizes_;
return *this;
@@ -28641,53 +27614,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DescriptorPoolCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
+ uint32_t maxSets = {};
+ uint32_t poolSizeCount = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {};
};
static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
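A sketch combining the two structs above into a small pool (counts are illustrative; setMaxSets and setPoolSizeCount sit in the hunks elided above but follow the same generated pattern):

  vk::DescriptorPoolSize poolSizes[] = {
    vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 16 ),
    vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 16 )
  };
  vk::DescriptorPoolCreateInfo poolInfo = vk::DescriptorPoolCreateInfo()
      .setFlags( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet )  // allow freeing individual sets
      .setMaxSets( 8 )
      .setPoolSizeCount( 2 )
      .setPPoolSizes( poolSizes );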
- namespace layout
- {
- struct DescriptorPoolInlineUniformBlockCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = 0 ) VULKAN_HPP_NOEXCEPT
- : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
- {}
-
- DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>(this) = rhs;
- }
-
- DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
- const void* pNext = nullptr;
- uint32_t maxInlineUniformBlockBindings;
- };
- static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DescriptorPoolInlineUniformBlockCreateInfoEXT : public layout::DescriptorPoolInlineUniformBlockCreateInfoEXT
+ struct DescriptorPoolInlineUniformBlockCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorPoolInlineUniformBlockCreateInfoEXT( maxInlineUniformBlockBindings_ )
+ VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
{}
+ VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT ) - offsetof( DescriptorPoolInlineUniformBlockCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorPoolInlineUniformBlockCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorPoolInlineUniformBlockCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -28725,61 +27682,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DescriptorPoolInlineUniformBlockCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
+ const void* pNext = {};
+ uint32_t maxInlineUniformBlockBindings = {};
};
static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DescriptorSetAllocateInfo
{
- struct DescriptorSetAllocateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( vk::DescriptorPool descriptorPool_ = vk::DescriptorPool(),
- uint32_t descriptorSetCount_ = 0,
- const vk::DescriptorSetLayout* pSetLayouts_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : descriptorPool( descriptorPool_ )
- , descriptorSetCount( descriptorSetCount_ )
- , pSetLayouts( pSetLayouts_ )
- {}
-
- DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetAllocateInfo*>(this) = rhs;
- }
-
- DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetAllocateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
- const void* pNext = nullptr;
- vk::DescriptorPool descriptorPool;
- uint32_t descriptorSetCount;
- const vk::DescriptorSetLayout* pSetLayouts;
- };
- static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DescriptorSetAllocateInfo : public layout::DescriptorSetAllocateInfo
- {
- VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( vk::DescriptorPool descriptorPool_ = vk::DescriptorPool(),
- uint32_t descriptorSetCount_ = 0,
- const vk::DescriptorSetLayout* pSetLayouts_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetAllocateInfo( descriptorPool_, descriptorSetCount_, pSetLayouts_ )
+ VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {},
+ uint32_t descriptorSetCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
+ : descriptorPool( descriptorPool_ )
+ , descriptorSetCount( descriptorSetCount_ )
+ , pSetLayouts( pSetLayouts_ )
{}
+ VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) - offsetof( DescriptorSetAllocateInfo, pNext ) );
+ return *this;
+ }
+
DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetAllocateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorSetAllocateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>(&rhs);
return *this;
}
@@ -28789,7 +27723,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorSetAllocateInfo & setDescriptorPool( vk::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
{
descriptorPool = descriptorPool_;
return *this;
@@ -28801,7 +27735,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorSetAllocateInfo & setPSetLayouts( const vk::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
@@ -28831,19 +27765,23 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DescriptorSetAllocateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
+ uint32_t descriptorSetCount = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
};
static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
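A sketch using the constructor above to allocate a single set (`pool` and `layout` are hypothetical handles created from the structs in the surrounding sketches):

  vk::DescriptorPool pool;         // hypothetical, e.g. created from poolInfo above
  vk::DescriptorSetLayout layout;  // hypothetical, created from a layout create info
  vk::DescriptorSetAllocateInfo setAllocInfo( pool, 1, &layout );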
struct DescriptorSetLayoutBinding
{
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = 0,
- vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler,
- uint32_t descriptorCount_ = 0,
- vk::ShaderStageFlags stageFlags_ = vk::ShaderStageFlags(),
- const vk::Sampler* pImmutableSamplers_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ uint32_t descriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+ const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
: binding( binding_ )
, descriptorType( descriptorType_ )
, descriptorCount( descriptorCount_ )
@@ -28853,12 +27791,12 @@ namespace VULKAN_HPP_NAMESPACE
DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorSetLayoutBinding*>(this) = rhs;
+ *this = rhs;
}
DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorSetLayoutBinding*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>(&rhs);
return *this;
}
@@ -28868,7 +27806,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorSetLayoutBinding & setDescriptorType( vk::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
@@ -28880,13 +27818,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorSetLayoutBinding & setStageFlags( vk::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
}
- DescriptorSetLayoutBinding & setPImmutableSamplers( const vk::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
{
pImmutableSamplers = pImmutableSamplers_;
return *this;
@@ -28917,92 +27855,69 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t binding;
- vk::DescriptorType descriptorType;
- uint32_t descriptorCount;
- vk::ShaderStageFlags stageFlags;
- const vk::Sampler* pImmutableSamplers;
+ uint32_t binding = {};
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ uint32_t descriptorCount = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+ const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {};
};
static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DescriptorSetLayoutBindingFlagsCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0,
- const vk::DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : bindingCount( bindingCount_ )
- , pBindingFlags( pBindingFlags_ )
- {}
-
- DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>(this) = rhs;
- }
-
- DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT;
- const void* pNext = nullptr;
- uint32_t bindingCount;
- const vk::DescriptorBindingFlagsEXT* pBindingFlags;
- };
- static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DescriptorSetLayoutBindingFlagsCreateInfoEXT : public layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT
+ struct DescriptorSetLayoutBindingFlagsCreateInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = 0,
- const vk::DescriptorBindingFlagsEXT* pBindingFlags_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT( bindingCount_, pBindingFlags_ )
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : bindingCount( bindingCount_ )
+ , pBindingFlags( pBindingFlags_ )
{}
- DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) - offsetof( DescriptorSetLayoutBindingFlagsCreateInfo, pNext ) );
+ return *this;
+ }
- DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT::operator=(rhs);
+ *this = rhs;
+ }
+
+ DescriptorSetLayoutBindingFlagsCreateInfo& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>(&rhs);
return *this;
}
- DescriptorSetLayoutBindingFlagsCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DescriptorSetLayoutBindingFlagsCreateInfoEXT & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
{
bindingCount = bindingCount_;
return *this;
}
- DescriptorSetLayoutBindingFlagsCreateInfoEXT & setPBindingFlags( const vk::DescriptorBindingFlagsEXT* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
{
pBindingFlags = pBindingFlags_;
return *this;
}
- operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
}
- operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>( this );
+ return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
}
- bool operator==( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -29010,66 +27925,44 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pBindingFlags == rhs.pBindingFlags );
}
- bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::DescriptorSetLayoutBindingFlagsCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
+ const void* pNext = {};
+ uint32_t bindingCount = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {};
};
- static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct DescriptorSetLayoutCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags flags_ = vk::DescriptorSetLayoutCreateFlags(),
- uint32_t bindingCount_ = 0,
- const vk::DescriptorSetLayoutBinding* pBindings_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , bindingCount( bindingCount_ )
- , pBindings( pBindings_ )
- {}
-
- DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>(this) = rhs;
- }
-
- DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
- const void* pNext = nullptr;
- vk::DescriptorSetLayoutCreateFlags flags;
- uint32_t bindingCount;
- const vk::DescriptorSetLayoutBinding* pBindings;
- };
- static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
- struct DescriptorSetLayoutCreateInfo : public layout::DescriptorSetLayoutCreateInfo
+ struct DescriptorSetLayoutCreateInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( vk::DescriptorSetLayoutCreateFlags flags_ = vk::DescriptorSetLayoutCreateFlags(),
- uint32_t bindingCount_ = 0,
- const vk::DescriptorSetLayoutBinding* pBindings_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetLayoutCreateInfo( flags_, bindingCount_, pBindings_ )
+ VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {},
+ uint32_t bindingCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , bindingCount( bindingCount_ )
+ , pBindings( pBindings_ )
{}
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) - offsetof( DescriptorSetLayoutCreateInfo, pNext ) );
+ return *this;
+ }
+
DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetLayoutCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorSetLayoutCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>(&rhs);
return *this;
}
@@ -29079,7 +27972,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorSetLayoutCreateInfo & setFlags( vk::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -29091,7 +27984,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorSetLayoutCreateInfo & setPBindings( const vk::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT
{
pBindings = pBindings_;
return *this;
@@ -29121,52 +28014,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DescriptorSetLayoutCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
+ uint32_t bindingCount = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {};
};
static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
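
The pattern above repeats for every struct in this refactor: the protected layout:: base class is folded away, sType becomes a const public member with an in-class initializer, and copy assignment memcpy's everything from pNext onward so the const sType is never overwritten. For reference, a minimal usage sketch of the refactored wrapper, assuming an already-created vk::Device named device (the helper name makeUboLayout is illustrative):

    #include <vulkan/vulkan.hpp>

    vk::DescriptorSetLayout makeUboLayout( vk::Device device )
    {
      // One uniform buffer visible to the vertex stage.
      vk::DescriptorSetLayoutBinding binding( 0, vk::DescriptorType::eUniformBuffer, 1,
                                              vk::ShaderStageFlagBits::eVertex );

      // The wrapper is now a plain struct: members default to {} and the
      // fluent setters chain exactly as before the refactor.
      vk::DescriptorSetLayoutCreateInfo createInfo;
      createInfo.setBindingCount( 1 ).setPBindings( &binding );

      return device.createDescriptorSetLayout( createInfo );
    }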
- namespace layout
- {
- struct DescriptorSetLayoutSupport
- {
- protected:
- DescriptorSetLayoutSupport() VULKAN_HPP_NOEXCEPT
- {}
-
- DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetLayoutSupport*>(this) = rhs;
- }
-
- DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetLayoutSupport*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
- void* pNext = nullptr;
- vk::Bool32 supported;
- };
- static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "layout struct and wrapper have different size!" );
- }
-
- struct DescriptorSetLayoutSupport : public layout::DescriptorSetLayoutSupport
+ struct DescriptorSetLayoutSupport
{
- DescriptorSetLayoutSupport() VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetLayoutSupport()
+ DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT
+ : supported( supported_ )
{}
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) - offsetof( DescriptorSetLayoutSupport, pNext ) );
+ return *this;
+ }
+
DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetLayoutSupport( rhs )
- {}
+ {
+ *this = rhs;
+ }
DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorSetLayoutSupport::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>(&rhs);
return *this;
}
@@ -29192,89 +28069,68 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DescriptorSetLayoutSupport::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 supported = {};
};
static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
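
DescriptorSetLayoutSupport is an output-only struct, so it carries no setters; its members are filled in by the support query. A sketch, reusing the createInfo from the previous snippet:

    bool layoutSupported( vk::Device device,
                          vk::DescriptorSetLayoutCreateInfo const & createInfo )
    {
      // Asks the implementation whether the layout could be created at all.
      vk::DescriptorSetLayoutSupport support =
          device.getDescriptorSetLayoutSupport( createInfo );
      return support.supported == VK_TRUE;
    }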
- namespace layout
- {
- struct DescriptorSetVariableDescriptorCountAllocateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0,
- const uint32_t* pDescriptorCounts_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : descriptorSetCount( descriptorSetCount_ )
- , pDescriptorCounts( pDescriptorCounts_ )
- {}
-
- DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>(this) = rhs;
- }
-
- DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT;
- const void* pNext = nullptr;
- uint32_t descriptorSetCount;
- const uint32_t* pDescriptorCounts;
- };
- static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DescriptorSetVariableDescriptorCountAllocateInfoEXT : public layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT
+ struct DescriptorSetVariableDescriptorCountAllocateInfo
{
- VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = 0,
- const uint32_t* pDescriptorCounts_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT( descriptorSetCount_, pDescriptorCounts_ )
+ VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {},
+ const uint32_t* pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT
+ : descriptorSetCount( descriptorSetCount_ )
+ , pDescriptorCounts( pDescriptorCounts_ )
{}
- DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) - offsetof( DescriptorSetVariableDescriptorCountAllocateInfo, pNext ) );
+ return *this;
+ }
- DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT::operator=(rhs);
+ *this = rhs;
+ }
+
+ DescriptorSetVariableDescriptorCountAllocateInfo& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>(&rhs);
return *this;
}
- DescriptorSetVariableDescriptorCountAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- DescriptorSetVariableDescriptorCountAllocateInfoEXT & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetCount = descriptorSetCount_;
return *this;
}
- DescriptorSetVariableDescriptorCountAllocateInfoEXT & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorCounts = pDescriptorCounts_;
return *this;
}
- operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>( this );
+ return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
}
- operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>( this );
+ return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
}
- bool operator==( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -29282,96 +28138,81 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pDescriptorCounts == rhs.pDescriptorCounts );
}
- bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::DescriptorSetVariableDescriptorCountAllocateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
+ const void* pNext = {};
+ uint32_t descriptorSetCount = {};
+ const uint32_t* pDescriptorCounts = {};
};
- static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
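
The allocate-info struct above is meant to be chained into a vk::DescriptorSetAllocateInfo through pNext. A sketch under stated assumptions (pool and layout already created; the count 64 is illustrative):

    std::vector<vk::DescriptorSet> allocateVariableSet( vk::Device device,
                                                        vk::DescriptorPool pool,
                                                        vk::DescriptorSetLayout layout )
    {
      uint32_t counts[1] = { 64 };  // actual size of the variable-count binding
      vk::DescriptorSetVariableDescriptorCountAllocateInfo variableInfo( 1, counts );

      vk::DescriptorSetAllocateInfo allocInfo( pool, 1, &layout );
      allocInfo.setPNext( &variableInfo );  // chain the promoted struct
      return device.allocateDescriptorSets( allocInfo );
    }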
- namespace layout
+ struct DescriptorSetVariableDescriptorCountLayoutSupport
{
- struct DescriptorSetVariableDescriptorCountLayoutSupportEXT
- {
- protected:
- DescriptorSetVariableDescriptorCountLayoutSupportEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- DescriptorSetVariableDescriptorCountLayoutSupportEXT( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>(this) = rhs;
- }
-
- DescriptorSetVariableDescriptorCountLayoutSupportEXT& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT;
- void* pNext = nullptr;
- uint32_t maxVariableDescriptorCount;
- };
- static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DescriptorSetVariableDescriptorCountLayoutSupportEXT : public layout::DescriptorSetVariableDescriptorCountLayoutSupportEXT
- {
- DescriptorSetVariableDescriptorCountLayoutSupportEXT() VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetVariableDescriptorCountLayoutSupportEXT()
+ DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
{}
- DescriptorSetVariableDescriptorCountLayoutSupportEXT( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorSetVariableDescriptorCountLayoutSupportEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) - offsetof( DescriptorSetVariableDescriptorCountLayoutSupport, pNext ) );
+ return *this;
+ }
- DescriptorSetVariableDescriptorCountLayoutSupportEXT& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorSetVariableDescriptorCountLayoutSupportEXT::operator=(rhs);
+ *this = rhs;
+ }
+
+ DescriptorSetVariableDescriptorCountLayoutSupport& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>(&rhs);
return *this;
}
- operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>( this );
+ return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
}
- operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>( this );
+ return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
}
- bool operator==( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
}
- bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::DescriptorSetVariableDescriptorCountLayoutSupportEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
+ void* pNext = {};
+ uint32_t maxVariableDescriptorCount = {};
};
- static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupportEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
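
The variable-count limit is queried by chaining this output struct behind vk::DescriptorSetLayoutSupport; both are written by the same call. A sketch, assuming device and createInfo as above:

    vk::DescriptorSetVariableDescriptorCountLayoutSupport varSupport;
    vk::DescriptorSetLayoutSupport support;
    support.pNext = &varSupport;  // pNext is a plain public member now
    device.getDescriptorSetLayoutSupport( &createInfo, &support );
    // varSupport.maxVariableDescriptorCount holds the upper bound afterwards.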
struct DescriptorUpdateTemplateEntry
{
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = 0,
- uint32_t dstArrayElement_ = 0,
- uint32_t descriptorCount_ = 0,
- vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler,
- size_t offset_ = 0,
- size_t stride_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {},
+ uint32_t dstArrayElement_ = {},
+ uint32_t descriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ size_t offset_ = {},
+ size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
: dstBinding( dstBinding_ )
, dstArrayElement( dstArrayElement_ )
, descriptorCount( descriptorCount_ )
@@ -29382,12 +28223,12 @@ namespace VULKAN_HPP_NAMESPACE
DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>(this) = rhs;
+ *this = rhs;
}
DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>(&rhs);
return *this;
}
@@ -29409,7 +28250,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorUpdateTemplateEntry & setDescriptorType( vk::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
@@ -29453,85 +28294,50 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t dstBinding;
- uint32_t dstArrayElement;
- uint32_t descriptorCount;
- vk::DescriptorType descriptorType;
- size_t offset;
- size_t stride;
+ uint32_t dstBinding = {};
+ uint32_t dstArrayElement = {};
+ uint32_t descriptorCount = {};
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ size_t offset = {};
+ size_t stride = {};
};
static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
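
A template entry maps a region of an application-side struct onto one descriptor write. A sketch (HostData and its member are illustrative):

    #include <cstddef>  // offsetof

    struct HostData { VkDescriptorBufferInfo ubo; };

    // Binding 0, element 0, one uniform buffer sourced at offsetof(HostData, ubo).
    vk::DescriptorUpdateTemplateEntry entry( 0, 0, 1,
                                             vk::DescriptorType::eUniformBuffer,
                                             offsetof( HostData, ubo ),
                                             sizeof( VkDescriptorBufferInfo ) );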
- namespace layout
+ struct DescriptorUpdateTemplateCreateInfo
{
- struct DescriptorUpdateTemplateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( vk::DescriptorUpdateTemplateCreateFlags flags_ = vk::DescriptorUpdateTemplateCreateFlags(),
- uint32_t descriptorUpdateEntryCount_ = 0,
- const vk::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr,
- vk::DescriptorUpdateTemplateType templateType_ = vk::DescriptorUpdateTemplateType::eDescriptorSet,
- vk::DescriptorSetLayout descriptorSetLayout_ = vk::DescriptorSetLayout(),
- vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics,
- vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(),
- uint32_t set_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
- , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
- , templateType( templateType_ )
- , descriptorSetLayout( descriptorSetLayout_ )
- , pipelineBindPoint( pipelineBindPoint_ )
- , pipelineLayout( pipelineLayout_ )
- , set( set_ )
- {}
-
- DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>(this) = rhs;
- }
-
- DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
- const void* pNext = nullptr;
- vk::DescriptorUpdateTemplateCreateFlags flags;
- uint32_t descriptorUpdateEntryCount;
- const vk::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries;
- vk::DescriptorUpdateTemplateType templateType;
- vk::DescriptorSetLayout descriptorSetLayout;
- vk::PipelineBindPoint pipelineBindPoint;
- vk::PipelineLayout pipelineLayout;
- uint32_t set;
- };
- static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DescriptorUpdateTemplateCreateInfo : public layout::DescriptorUpdateTemplateCreateInfo
- {
- VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( vk::DescriptorUpdateTemplateCreateFlags flags_ = vk::DescriptorUpdateTemplateCreateFlags(),
- uint32_t descriptorUpdateEntryCount_ = 0,
- const vk::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = nullptr,
- vk::DescriptorUpdateTemplateType templateType_ = vk::DescriptorUpdateTemplateType::eDescriptorSet,
- vk::DescriptorSetLayout descriptorSetLayout_ = vk::DescriptorSetLayout(),
- vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics,
- vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(),
- uint32_t set_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorUpdateTemplateCreateInfo( flags_, descriptorUpdateEntryCount_, pDescriptorUpdateEntries_, templateType_, descriptorSetLayout_, pipelineBindPoint_, pipelineLayout_, set_ )
+ VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {},
+ uint32_t descriptorUpdateEntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
+ uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
+ , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
+ , templateType( templateType_ )
+ , descriptorSetLayout( descriptorSetLayout_ )
+ , pipelineBindPoint( pipelineBindPoint_ )
+ , pipelineLayout( pipelineLayout_ )
+ , set( set_ )
{}
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) - offsetof( DescriptorUpdateTemplateCreateInfo, pNext ) );
+ return *this;
+ }
+
DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DescriptorUpdateTemplateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DescriptorUpdateTemplateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>(&rhs);
return *this;
}
@@ -29541,7 +28347,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorUpdateTemplateCreateInfo & setFlags( vk::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -29553,31 +28359,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const vk::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
{
pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
return *this;
}
- DescriptorUpdateTemplateCreateInfo & setTemplateType( vk::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
{
templateType = templateType_;
return *this;
}
- DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( vk::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSetLayout = descriptorSetLayout_;
return *this;
}
- DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( vk::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- DescriptorUpdateTemplateCreateInfo & setPipelineLayout( vk::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+ DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
pipelineLayout = pipelineLayout_;
return *this;
@@ -29618,65 +28424,47 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DescriptorUpdateTemplateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
+ uint32_t descriptorUpdateEntryCount = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries = {};
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
+ uint32_t set = {};
};
static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
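
Continuing the sketch above: the entry array plus a layout yield an update template, which then rewrites a whole set from host memory in one call (setLayout, set, and hostData are assumed to exist):

    vk::DescriptorUpdateTemplateCreateInfo templateInfo(
        {},                                                // flags
        1, &entry,                                         // template entries
        vk::DescriptorUpdateTemplateType::eDescriptorSet,  // whole-set updates
        setLayout );

    vk::DescriptorUpdateTemplate updateTemplate =
        device.createDescriptorUpdateTemplate( templateInfo );
    device.updateDescriptorSetWithTemplate( set, updateTemplate, &hostData );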
- namespace layout
+ struct DeviceQueueCreateInfo
{
- struct DeviceQueueCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( vk::DeviceQueueCreateFlags flags_ = vk::DeviceQueueCreateFlags(),
- uint32_t queueFamilyIndex_ = 0,
- uint32_t queueCount_ = 0,
- const float* pQueuePriorities_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queueFamilyIndex( queueFamilyIndex_ )
- , queueCount( queueCount_ )
- , pQueuePriorities( pQueuePriorities_ )
- {}
-
- DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceQueueCreateInfo*>(this) = rhs;
- }
-
- DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceQueueCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceQueueCreateInfo;
- const void* pNext = nullptr;
- vk::DeviceQueueCreateFlags flags;
- uint32_t queueFamilyIndex;
- uint32_t queueCount;
- const float* pQueuePriorities;
- };
- static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceQueueCreateInfo : public layout::DeviceQueueCreateInfo
- {
- VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( vk::DeviceQueueCreateFlags flags_ = vk::DeviceQueueCreateFlags(),
- uint32_t queueFamilyIndex_ = 0,
- uint32_t queueCount_ = 0,
- const float* pQueuePriorities_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceQueueCreateInfo( flags_, queueFamilyIndex_, queueCount_, pQueuePriorities_ )
+ VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
+ uint32_t queueFamilyIndex_ = {},
+ uint32_t queueCount_ = {},
+ const float* pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
+ , queueCount( queueCount_ )
+ , pQueuePriorities( pQueuePriorities_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) - offsetof( DeviceQueueCreateInfo, pNext ) );
+ return *this;
+ }
+
DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceQueueCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceQueueCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>(&rhs);
return *this;
}
@@ -29686,7 +28474,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceQueueCreateInfo & setFlags( vk::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -29735,69 +28523,74 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceQueueCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+ uint32_t queueFamilyIndex = {};
+ uint32_t queueCount = {};
+ const float* pQueuePriorities = {};
};
static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
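
A sketch of the queue description consumed by device creation further below (graphicsFamily is assumed to come from a queue-family query):

    float priority = 1.0f;
    vk::DeviceQueueCreateInfo queueInfo( {}, graphicsFamily, /*queueCount*/ 1, &priority );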
struct PhysicalDeviceFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( vk::Bool32 robustBufferAccess_ = 0,
- vk::Bool32 fullDrawIndexUint32_ = 0,
- vk::Bool32 imageCubeArray_ = 0,
- vk::Bool32 independentBlend_ = 0,
- vk::Bool32 geometryShader_ = 0,
- vk::Bool32 tessellationShader_ = 0,
- vk::Bool32 sampleRateShading_ = 0,
- vk::Bool32 dualSrcBlend_ = 0,
- vk::Bool32 logicOp_ = 0,
- vk::Bool32 multiDrawIndirect_ = 0,
- vk::Bool32 drawIndirectFirstInstance_ = 0,
- vk::Bool32 depthClamp_ = 0,
- vk::Bool32 depthBiasClamp_ = 0,
- vk::Bool32 fillModeNonSolid_ = 0,
- vk::Bool32 depthBounds_ = 0,
- vk::Bool32 wideLines_ = 0,
- vk::Bool32 largePoints_ = 0,
- vk::Bool32 alphaToOne_ = 0,
- vk::Bool32 multiViewport_ = 0,
- vk::Bool32 samplerAnisotropy_ = 0,
- vk::Bool32 textureCompressionETC2_ = 0,
- vk::Bool32 textureCompressionASTC_LDR_ = 0,
- vk::Bool32 textureCompressionBC_ = 0,
- vk::Bool32 occlusionQueryPrecise_ = 0,
- vk::Bool32 pipelineStatisticsQuery_ = 0,
- vk::Bool32 vertexPipelineStoresAndAtomics_ = 0,
- vk::Bool32 fragmentStoresAndAtomics_ = 0,
- vk::Bool32 shaderTessellationAndGeometryPointSize_ = 0,
- vk::Bool32 shaderImageGatherExtended_ = 0,
- vk::Bool32 shaderStorageImageExtendedFormats_ = 0,
- vk::Bool32 shaderStorageImageMultisample_ = 0,
- vk::Bool32 shaderStorageImageReadWithoutFormat_ = 0,
- vk::Bool32 shaderStorageImageWriteWithoutFormat_ = 0,
- vk::Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderSampledImageArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderStorageImageArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderClipDistance_ = 0,
- vk::Bool32 shaderCullDistance_ = 0,
- vk::Bool32 shaderFloat64_ = 0,
- vk::Bool32 shaderInt64_ = 0,
- vk::Bool32 shaderInt16_ = 0,
- vk::Bool32 shaderResourceResidency_ = 0,
- vk::Bool32 shaderResourceMinLod_ = 0,
- vk::Bool32 sparseBinding_ = 0,
- vk::Bool32 sparseResidencyBuffer_ = 0,
- vk::Bool32 sparseResidencyImage2D_ = 0,
- vk::Bool32 sparseResidencyImage3D_ = 0,
- vk::Bool32 sparseResidency2Samples_ = 0,
- vk::Bool32 sparseResidency4Samples_ = 0,
- vk::Bool32 sparseResidency8Samples_ = 0,
- vk::Bool32 sparseResidency16Samples_ = 0,
- vk::Bool32 sparseResidencyAliased_ = 0,
- vk::Bool32 variableMultisampleRate_ = 0,
- vk::Bool32 inheritedQueries_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT
: robustBufferAccess( robustBufferAccess_ )
, fullDrawIndexUint32( fullDrawIndexUint32_ )
, imageCubeArray( imageCubeArray_ )
@@ -29857,340 +28650,340 @@ namespace VULKAN_HPP_NAMESPACE
PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceFeatures*>(this) = rhs;
+ *this = rhs;
}
PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceFeatures*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>(&rhs);
return *this;
}
- PhysicalDeviceFeatures & setRobustBufferAccess( vk::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
{
robustBufferAccess = robustBufferAccess_;
return *this;
}
- PhysicalDeviceFeatures & setFullDrawIndexUint32( vk::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
{
fullDrawIndexUint32 = fullDrawIndexUint32_;
return *this;
}
- PhysicalDeviceFeatures & setImageCubeArray( vk::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
{
imageCubeArray = imageCubeArray_;
return *this;
}
- PhysicalDeviceFeatures & setIndependentBlend( vk::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
{
independentBlend = independentBlend_;
return *this;
}
- PhysicalDeviceFeatures & setGeometryShader( vk::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
{
geometryShader = geometryShader_;
return *this;
}
- PhysicalDeviceFeatures & setTessellationShader( vk::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
tessellationShader = tessellationShader_;
return *this;
}
- PhysicalDeviceFeatures & setSampleRateShading( vk::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
{
sampleRateShading = sampleRateShading_;
return *this;
}
- PhysicalDeviceFeatures & setDualSrcBlend( vk::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
{
dualSrcBlend = dualSrcBlend_;
return *this;
}
- PhysicalDeviceFeatures & setLogicOp( vk::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
{
logicOp = logicOp_;
return *this;
}
- PhysicalDeviceFeatures & setMultiDrawIndirect( vk::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
{
multiDrawIndirect = multiDrawIndirect_;
return *this;
}
- PhysicalDeviceFeatures & setDrawIndirectFirstInstance( vk::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
{
drawIndirectFirstInstance = drawIndirectFirstInstance_;
return *this;
}
- PhysicalDeviceFeatures & setDepthClamp( vk::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthClamp = depthClamp_;
return *this;
}
- PhysicalDeviceFeatures & setDepthBiasClamp( vk::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasClamp = depthBiasClamp_;
return *this;
}
- PhysicalDeviceFeatures & setFillModeNonSolid( vk::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
{
fillModeNonSolid = fillModeNonSolid_;
return *this;
}
- PhysicalDeviceFeatures & setDepthBounds( vk::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
{
depthBounds = depthBounds_;
return *this;
}
- PhysicalDeviceFeatures & setWideLines( vk::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
{
wideLines = wideLines_;
return *this;
}
- PhysicalDeviceFeatures & setLargePoints( vk::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
{
largePoints = largePoints_;
return *this;
}
- PhysicalDeviceFeatures & setAlphaToOne( vk::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
{
alphaToOne = alphaToOne_;
return *this;
}
- PhysicalDeviceFeatures & setMultiViewport( vk::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
{
multiViewport = multiViewport_;
return *this;
}
- PhysicalDeviceFeatures & setSamplerAnisotropy( vk::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
{
samplerAnisotropy = samplerAnisotropy_;
return *this;
}
- PhysicalDeviceFeatures & setTextureCompressionETC2( vk::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionETC2 = textureCompressionETC2_;
return *this;
}
- PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( vk::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
return *this;
}
- PhysicalDeviceFeatures & setTextureCompressionBC( vk::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionBC = textureCompressionBC_;
return *this;
}
- PhysicalDeviceFeatures & setOcclusionQueryPrecise( vk::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
{
occlusionQueryPrecise = occlusionQueryPrecise_;
return *this;
}
- PhysicalDeviceFeatures & setPipelineStatisticsQuery( vk::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatisticsQuery = pipelineStatisticsQuery_;
return *this;
}
- PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( vk::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
{
vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
return *this;
}
- PhysicalDeviceFeatures & setFragmentStoresAndAtomics( vk::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
{
fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
return *this;
}
- PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( vk::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
{
shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
return *this;
}
- PhysicalDeviceFeatures & setShaderImageGatherExtended( vk::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
{
shaderImageGatherExtended = shaderImageGatherExtended_;
return *this;
}
- PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( vk::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
return *this;
}
- PhysicalDeviceFeatures & setShaderStorageImageMultisample( vk::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageMultisample = shaderStorageImageMultisample_;
return *this;
}
- PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( vk::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
return *this;
}
- PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( vk::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
return *this;
}
- PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( vk::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
return *this;
}
- PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( vk::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
return *this;
}
- PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( vk::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
return *this;
}
- PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( vk::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
return *this;
}
- PhysicalDeviceFeatures & setShaderClipDistance( vk::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
{
shaderClipDistance = shaderClipDistance_;
return *this;
}
- PhysicalDeviceFeatures & setShaderCullDistance( vk::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
{
shaderCullDistance = shaderCullDistance_;
return *this;
}
- PhysicalDeviceFeatures & setShaderFloat64( vk::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat64 = shaderFloat64_;
return *this;
}
- PhysicalDeviceFeatures & setShaderInt64( vk::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt64 = shaderInt64_;
return *this;
}
- PhysicalDeviceFeatures & setShaderInt16( vk::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt16 = shaderInt16_;
return *this;
}
- PhysicalDeviceFeatures & setShaderResourceResidency( vk::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
{
shaderResourceResidency = shaderResourceResidency_;
return *this;
}
- PhysicalDeviceFeatures & setShaderResourceMinLod( vk::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
{
shaderResourceMinLod = shaderResourceMinLod_;
return *this;
}
- PhysicalDeviceFeatures & setSparseBinding( vk::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
{
sparseBinding = sparseBinding_;
return *this;
}
- PhysicalDeviceFeatures & setSparseResidencyBuffer( vk::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyBuffer = sparseResidencyBuffer_;
return *this;
}
- PhysicalDeviceFeatures & setSparseResidencyImage2D( vk::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyImage2D = sparseResidencyImage2D_;
return *this;
}
- PhysicalDeviceFeatures & setSparseResidencyImage3D( vk::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyImage3D = sparseResidencyImage3D_;
return *this;
}
- PhysicalDeviceFeatures & setSparseResidency2Samples( vk::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency2Samples = sparseResidency2Samples_;
return *this;
}
- PhysicalDeviceFeatures & setSparseResidency4Samples( vk::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency4Samples = sparseResidency4Samples_;
return *this;
}
- PhysicalDeviceFeatures & setSparseResidency8Samples( vk::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency8Samples = sparseResidency8Samples_;
return *this;
}
- PhysicalDeviceFeatures & setSparseResidency16Samples( vk::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidency16Samples = sparseResidency16Samples_;
return *this;
}
- PhysicalDeviceFeatures & setSparseResidencyAliased( vk::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
{
sparseResidencyAliased = sparseResidencyAliased_;
return *this;
}
- PhysicalDeviceFeatures & setVariableMultisampleRate( vk::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
{
variableMultisampleRate = variableMultisampleRate_;
return *this;
}
- PhysicalDeviceFeatures & setInheritedQueries( vk::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
{
inheritedQueries = inheritedQueries_;
return *this;
@@ -30271,134 +29064,99 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Bool32 robustBufferAccess;
- vk::Bool32 fullDrawIndexUint32;
- vk::Bool32 imageCubeArray;
- vk::Bool32 independentBlend;
- vk::Bool32 geometryShader;
- vk::Bool32 tessellationShader;
- vk::Bool32 sampleRateShading;
- vk::Bool32 dualSrcBlend;
- vk::Bool32 logicOp;
- vk::Bool32 multiDrawIndirect;
- vk::Bool32 drawIndirectFirstInstance;
- vk::Bool32 depthClamp;
- vk::Bool32 depthBiasClamp;
- vk::Bool32 fillModeNonSolid;
- vk::Bool32 depthBounds;
- vk::Bool32 wideLines;
- vk::Bool32 largePoints;
- vk::Bool32 alphaToOne;
- vk::Bool32 multiViewport;
- vk::Bool32 samplerAnisotropy;
- vk::Bool32 textureCompressionETC2;
- vk::Bool32 textureCompressionASTC_LDR;
- vk::Bool32 textureCompressionBC;
- vk::Bool32 occlusionQueryPrecise;
- vk::Bool32 pipelineStatisticsQuery;
- vk::Bool32 vertexPipelineStoresAndAtomics;
- vk::Bool32 fragmentStoresAndAtomics;
- vk::Bool32 shaderTessellationAndGeometryPointSize;
- vk::Bool32 shaderImageGatherExtended;
- vk::Bool32 shaderStorageImageExtendedFormats;
- vk::Bool32 shaderStorageImageMultisample;
- vk::Bool32 shaderStorageImageReadWithoutFormat;
- vk::Bool32 shaderStorageImageWriteWithoutFormat;
- vk::Bool32 shaderUniformBufferArrayDynamicIndexing;
- vk::Bool32 shaderSampledImageArrayDynamicIndexing;
- vk::Bool32 shaderStorageBufferArrayDynamicIndexing;
- vk::Bool32 shaderStorageImageArrayDynamicIndexing;
- vk::Bool32 shaderClipDistance;
- vk::Bool32 shaderCullDistance;
- vk::Bool32 shaderFloat64;
- vk::Bool32 shaderInt64;
- vk::Bool32 shaderInt16;
- vk::Bool32 shaderResourceResidency;
- vk::Bool32 shaderResourceMinLod;
- vk::Bool32 sparseBinding;
- vk::Bool32 sparseResidencyBuffer;
- vk::Bool32 sparseResidencyImage2D;
- vk::Bool32 sparseResidencyImage3D;
- vk::Bool32 sparseResidency2Samples;
- vk::Bool32 sparseResidency4Samples;
- vk::Bool32 sparseResidency8Samples;
- vk::Bool32 sparseResidency16Samples;
- vk::Bool32 sparseResidencyAliased;
- vk::Bool32 variableMultisampleRate;
- vk::Bool32 inheritedQueries;
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
+ VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
+ VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
+ VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
+ VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
+ VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
+ VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
+ VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
+ VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
+ VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
};
static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
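  // Usage sketch (annotation, assumes the default VULKAN_HPP_NAMESPACE alias "vk"):
  // with the "= {}" member initializers above, every feature flag of a
  // default-constructed PhysicalDeviceFeatures starts out as VK_FALSE, so only the
  // features actually required have to be switched on.
  vk::PhysicalDeviceFeatures features;
  features.samplerAnisotropy = VK_TRUE;  // members shown above
  features.fillModeNonSolid = VK_TRUE;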
- namespace layout
- {
- struct DeviceCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceCreateInfo( vk::DeviceCreateFlags flags_ = vk::DeviceCreateFlags(),
- uint32_t queueCreateInfoCount_ = 0,
- const vk::DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr,
- uint32_t enabledLayerCount_ = 0,
- const char* const* ppEnabledLayerNames_ = nullptr,
- uint32_t enabledExtensionCount_ = 0,
- const char* const* ppEnabledExtensionNames_ = nullptr,
- const vk::PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queueCreateInfoCount( queueCreateInfoCount_ )
- , pQueueCreateInfos( pQueueCreateInfos_ )
- , enabledLayerCount( enabledLayerCount_ )
- , ppEnabledLayerNames( ppEnabledLayerNames_ )
- , enabledExtensionCount( enabledExtensionCount_ )
- , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
- , pEnabledFeatures( pEnabledFeatures_ )
- {}
-
- DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceCreateInfo*>(this) = rhs;
- }
-
- DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceCreateInfo;
- const void* pNext = nullptr;
- vk::DeviceCreateFlags flags;
- uint32_t queueCreateInfoCount;
- const vk::DeviceQueueCreateInfo* pQueueCreateInfos;
- uint32_t enabledLayerCount;
- const char* const* ppEnabledLayerNames;
- uint32_t enabledExtensionCount;
- const char* const* ppEnabledExtensionNames;
- const vk::PhysicalDeviceFeatures* pEnabledFeatures;
- };
- static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceCreateInfo : public layout::DeviceCreateInfo
+ struct DeviceCreateInfo
{
- VULKAN_HPP_CONSTEXPR DeviceCreateInfo( vk::DeviceCreateFlags flags_ = vk::DeviceCreateFlags(),
- uint32_t queueCreateInfoCount_ = 0,
- const vk::DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr,
- uint32_t enabledLayerCount_ = 0,
- const char* const* ppEnabledLayerNames_ = nullptr,
- uint32_t enabledExtensionCount_ = 0,
- const char* const* ppEnabledExtensionNames_ = nullptr,
- const vk::PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceCreateInfo( flags_, queueCreateInfoCount_, pQueueCreateInfos_, enabledLayerCount_, ppEnabledLayerNames_, enabledExtensionCount_, ppEnabledExtensionNames_, pEnabledFeatures_ )
+ VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {},
+ uint32_t queueCreateInfoCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {},
+ uint32_t enabledLayerCount_ = {},
+ const char* const* ppEnabledLayerNames_ = {},
+ uint32_t enabledExtensionCount_ = {},
+ const char* const* ppEnabledExtensionNames_ = {},
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , queueCreateInfoCount( queueCreateInfoCount_ )
+ , pQueueCreateInfos( pQueueCreateInfos_ )
+ , enabledLayerCount( enabledLayerCount_ )
+ , ppEnabledLayerNames( ppEnabledLayerNames_ )
+ , enabledExtensionCount( enabledExtensionCount_ )
+ , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+ , pEnabledFeatures( pEnabledFeatures_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) - offsetof( DeviceCreateInfo, pNext ) );
+ return *this;
+ }
+
DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>(&rhs);
return *this;
}
@@ -30408,7 +29166,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceCreateInfo & setFlags( vk::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -30420,7 +29178,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceCreateInfo & setPQueueCreateInfos( const vk::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+ DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
{
pQueueCreateInfos = pQueueCreateInfos_;
return *this;
@@ -30450,7 +29208,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceCreateInfo & setPEnabledFeatures( const vk::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
+ DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pEnabledFeatures = pEnabledFeatures_;
return *this;
@@ -30485,53 +29243,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
+ uint32_t queueCreateInfoCount = {};
+ const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {};
+ uint32_t enabledLayerCount = {};
+ const char* const* ppEnabledLayerNames = {};
+ uint32_t enabledExtensionCount = {};
+ const char* const* ppEnabledExtensionNames = {};
+ const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {};
};
static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
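  // Usage sketch for the flattened DeviceCreateInfo: the constructor argument order is
  // unchanged from the layout:: version, only the defaults are now "{}". "queueInfo" is
  // assumed to be an already-filled vk::DeviceQueueCreateInfo.
  vk::PhysicalDeviceFeatures enabledFeatures;           // all VK_FALSE by default
  const char* extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
  vk::DeviceCreateInfo createInfo( {},                  // flags
                                   1, &queueInfo,       // queueCreateInfoCount, pQueueCreateInfos
                                   0, nullptr,          // enabledLayerCount, ppEnabledLayerNames
                                   1, extensions,       // enabledExtensionCount, ppEnabledExtensionNames
                                   &enabledFeatures );  // pEnabledFeatures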
- namespace layout
- {
- struct DeviceEventInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( vk::DeviceEventTypeEXT deviceEvent_ = vk::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
- : deviceEvent( deviceEvent_ )
- {}
-
- DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceEventInfoEXT*>(this) = rhs;
- }
-
- DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceEventInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceEventInfoEXT;
- const void* pNext = nullptr;
- vk::DeviceEventTypeEXT deviceEvent;
- };
- static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceEventInfoEXT : public layout::DeviceEventInfoEXT
+ struct DeviceEventInfoEXT
{
- VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( vk::DeviceEventTypeEXT deviceEvent_ = vk::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceEventInfoEXT( deviceEvent_ )
+ VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
+ : deviceEvent( deviceEvent_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) );
+ return *this;
+ }
+
DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceEventInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceEventInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>(&rhs);
return *this;
}
@@ -30541,7 +29287,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceEventInfoEXT & setDeviceEvent( vk::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+ DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
{
deviceEvent = deviceEvent_;
return *this;
@@ -30569,53 +29315,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceEventInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
};
static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DeviceGeneratedCommandsFeaturesNVX
{
- struct DeviceGeneratedCommandsFeaturesNVX
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( vk::Bool32 computeBindingPointSupport_ = 0 ) VULKAN_HPP_NOEXCEPT
- : computeBindingPointSupport( computeBindingPointSupport_ )
- {}
-
- DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>(this) = rhs;
- }
-
- DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX;
- const void* pNext = nullptr;
- vk::Bool32 computeBindingPointSupport;
- };
- static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGeneratedCommandsFeaturesNVX : public layout::DeviceGeneratedCommandsFeaturesNVX
- {
- VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( vk::Bool32 computeBindingPointSupport_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGeneratedCommandsFeaturesNVX( computeBindingPointSupport_ )
+ VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ = {} ) VULKAN_HPP_NOEXCEPT
+ : computeBindingPointSupport( computeBindingPointSupport_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX & operator=( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX ) - offsetof( DeviceGeneratedCommandsFeaturesNVX, pNext ) );
+ return *this;
+ }
+
DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGeneratedCommandsFeaturesNVX( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGeneratedCommandsFeaturesNVX::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX const *>(&rhs);
return *this;
}
@@ -30625,7 +29352,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( vk::Bool32 computeBindingPointSupport_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ ) VULKAN_HPP_NOEXCEPT
{
computeBindingPointSupport = computeBindingPointSupport_;
return *this;
@@ -30653,69 +29380,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGeneratedCommandsFeaturesNVX::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport = {};
};
static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGeneratedCommandsFeaturesNVX>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DeviceGeneratedCommandsLimitsNVX
{
- struct DeviceGeneratedCommandsLimitsNVX
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0,
- uint32_t maxObjectEntryCounts_ = 0,
- uint32_t minSequenceCountBufferOffsetAlignment_ = 0,
- uint32_t minSequenceIndexBufferOffsetAlignment_ = 0,
- uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 ) VULKAN_HPP_NOEXCEPT
- : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
- , maxObjectEntryCounts( maxObjectEntryCounts_ )
- , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
- , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
- , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
- {}
-
- DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>(this) = rhs;
- }
-
- DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX;
- const void* pNext = nullptr;
- uint32_t maxIndirectCommandsLayoutTokenCount;
- uint32_t maxObjectEntryCounts;
- uint32_t minSequenceCountBufferOffsetAlignment;
- uint32_t minSequenceIndexBufferOffsetAlignment;
- uint32_t minCommandsTokenBufferOffsetAlignment;
- };
- static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGeneratedCommandsLimitsNVX : public layout::DeviceGeneratedCommandsLimitsNVX
- {
- VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0,
- uint32_t maxObjectEntryCounts_ = 0,
- uint32_t minSequenceCountBufferOffsetAlignment_ = 0,
- uint32_t minSequenceIndexBufferOffsetAlignment_ = 0,
- uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGeneratedCommandsLimitsNVX( maxIndirectCommandsLayoutTokenCount_, maxObjectEntryCounts_, minSequenceCountBufferOffsetAlignment_, minSequenceIndexBufferOffsetAlignment_, minCommandsTokenBufferOffsetAlignment_ )
+ VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = {},
+ uint32_t maxObjectEntryCounts_ = {},
+ uint32_t minSequenceCountBufferOffsetAlignment_ = {},
+ uint32_t minSequenceIndexBufferOffsetAlignment_ = {},
+ uint32_t minCommandsTokenBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
+ , maxObjectEntryCounts( maxObjectEntryCounts_ )
+ , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
+ , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
+ , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX & operator=( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX ) - offsetof( DeviceGeneratedCommandsLimitsNVX, pNext ) );
+ return *this;
+ }
+
DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGeneratedCommandsLimitsNVX( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGeneratedCommandsLimitsNVX::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX const *>(&rhs);
return *this;
}
@@ -30781,57 +29481,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGeneratedCommandsLimitsNVX::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX;
+ const void* pNext = {};
+ uint32_t maxIndirectCommandsLayoutTokenCount = {};
+ uint32_t maxObjectEntryCounts = {};
+ uint32_t minSequenceCountBufferOffsetAlignment = {};
+ uint32_t minSequenceIndexBufferOffsetAlignment = {};
+ uint32_t minCommandsTokenBufferOffsetAlignment = {};
};
static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGeneratedCommandsLimitsNVX>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DeviceGroupBindSparseInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0,
- uint32_t memoryDeviceIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : resourceDeviceIndex( resourceDeviceIndex_ )
- , memoryDeviceIndex( memoryDeviceIndex_ )
- {}
-
- DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupBindSparseInfo*>(this) = rhs;
- }
-
- DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupBindSparseInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
- const void* pNext = nullptr;
- uint32_t resourceDeviceIndex;
- uint32_t memoryDeviceIndex;
- };
- static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGroupBindSparseInfo : public layout::DeviceGroupBindSparseInfo
+ struct DeviceGroupBindSparseInfo
{
- VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = 0,
- uint32_t memoryDeviceIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupBindSparseInfo( resourceDeviceIndex_, memoryDeviceIndex_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {},
+ uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : resourceDeviceIndex( resourceDeviceIndex_ )
+ , memoryDeviceIndex( memoryDeviceIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) - offsetof( DeviceGroupBindSparseInfo, pNext ) );
+ return *this;
+ }
+
DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupBindSparseInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGroupBindSparseInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>(&rhs);
return *this;
}
@@ -30876,53 +29559,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGroupBindSparseInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
+ const void* pNext = {};
+ uint32_t resourceDeviceIndex = {};
+ uint32_t memoryDeviceIndex = {};
};
static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DeviceGroupCommandBufferBeginInfo
{
- struct DeviceGroupCommandBufferBeginInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
- : deviceMask( deviceMask_ )
- {}
-
- DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>(this) = rhs;
- }
-
- DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
- const void* pNext = nullptr;
- uint32_t deviceMask;
- };
- static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGroupCommandBufferBeginInfo : public layout::DeviceGroupCommandBufferBeginInfo
- {
- VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupCommandBufferBeginInfo( deviceMask_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
+ : deviceMask( deviceMask_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) );
+ return *this;
+ }
+
DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupCommandBufferBeginInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGroupCommandBufferBeginInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>(&rhs);
return *this;
}
@@ -30960,57 +29625,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGroupCommandBufferBeginInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
+ const void* pNext = {};
+ uint32_t deviceMask = {};
};
static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
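  // Usage sketch: like the other sType/pNext wrappers in this header, this struct is
  // not passed on its own but chained into its parent struct through pNext, which the
  // flattening above leaves as a public member.
  vk::DeviceGroupCommandBufferBeginInfo deviceGroupBegin( 0x1 );  // device index 0 only
  vk::CommandBufferBeginInfo beginInfo;
  beginInfo.pNext = &deviceGroupBegin;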
- namespace layout
- {
- struct DeviceGroupDeviceCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0,
- const vk::PhysicalDevice* pPhysicalDevices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : physicalDeviceCount( physicalDeviceCount_ )
- , pPhysicalDevices( pPhysicalDevices_ )
- {}
-
- DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>(this) = rhs;
- }
-
- DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
- const void* pNext = nullptr;
- uint32_t physicalDeviceCount;
- const vk::PhysicalDevice* pPhysicalDevices;
- };
- static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGroupDeviceCreateInfo : public layout::DeviceGroupDeviceCreateInfo
+ struct DeviceGroupDeviceCreateInfo
{
- VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = 0,
- const vk::PhysicalDevice* pPhysicalDevices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupDeviceCreateInfo( physicalDeviceCount_, pPhysicalDevices_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT
+ : physicalDeviceCount( physicalDeviceCount_ )
+ , pPhysicalDevices( pPhysicalDevices_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) );
+ return *this;
+ }
+
DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupDeviceCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGroupDeviceCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>(&rhs);
return *this;
}
@@ -31026,7 +29670,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const vk::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
{
pPhysicalDevices = pPhysicalDevices_;
return *this;
@@ -31055,53 +29699,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGroupDeviceCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
+ const void* pNext = {};
+ uint32_t physicalDeviceCount = {};
+ const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {};
};
static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DeviceGroupPresentCapabilitiesKHR
{
- struct DeviceGroupPresentCapabilitiesKHR
+ DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const& presentMask_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : presentMask{}
+ , modes( modes_ )
{
- protected:
- DeviceGroupPresentCapabilitiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>(this) = rhs;
- }
-
- DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
- const void* pNext = nullptr;
- uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE];
- vk::DeviceGroupPresentModeFlagsKHR modes;
- };
- static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,VK_MAX_DEVICE_GROUP_SIZE,VK_MAX_DEVICE_GROUP_SIZE>::copy( presentMask, presentMask_ );
+ }
- struct DeviceGroupPresentCapabilitiesKHR : public layout::DeviceGroupPresentCapabilitiesKHR
- {
- DeviceGroupPresentCapabilitiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupPresentCapabilitiesKHR()
- {}
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) );
+ return *this;
+ }
DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupPresentCapabilitiesKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGroupPresentCapabilitiesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>(&rhs);
return *this;
}
@@ -31128,61 +29758,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGroupPresentCapabilitiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+ const void* pNext = {};
+ uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
};
static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DeviceGroupPresentInfoKHR
{
- struct DeviceGroupPresentInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0,
- const uint32_t* pDeviceMasks_ = nullptr,
- vk::DeviceGroupPresentModeFlagBitsKHR mode_ = vk::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
- , pDeviceMasks( pDeviceMasks_ )
- , mode( mode_ )
- {}
-
- DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>(this) = rhs;
- }
-
- DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
- const void* pNext = nullptr;
- uint32_t swapchainCount;
- const uint32_t* pDeviceMasks;
- vk::DeviceGroupPresentModeFlagBitsKHR mode;
- };
- static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGroupPresentInfoKHR : public layout::DeviceGroupPresentInfoKHR
- {
- VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = 0,
- const uint32_t* pDeviceMasks_ = nullptr,
- vk::DeviceGroupPresentModeFlagBitsKHR mode_ = vk::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupPresentInfoKHR( swapchainCount_, pDeviceMasks_, mode_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {},
+ const uint32_t* pDeviceMasks_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
+ : swapchainCount( swapchainCount_ )
+ , pDeviceMasks( pDeviceMasks_ )
+ , mode( mode_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) - offsetof( DeviceGroupPresentInfoKHR, pNext ) );
+ return *this;
+ }
+
DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupPresentInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGroupPresentInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>(&rhs);
return *this;
}
@@ -31204,7 +29812,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceGroupPresentInfoKHR & setMode( vk::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
@@ -31226,7 +29834,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pNext == rhs.pNext )
&& ( swapchainCount == rhs.swapchainCount )
&& ( pDeviceMasks == rhs.pDeviceMasks )
- && vk::operator==( mode, rhs.mode );
+ && ( mode == rhs.mode );
}
bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -31234,61 +29842,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGroupPresentInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+ const void* pNext = {};
+ uint32_t swapchainCount = {};
+ const uint32_t* pDeviceMasks = {};
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
};
static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
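  // Usage sketch, assuming "presentInfo" is an already-filled vk::PresentInfoKHR:
  // one device mask per swapchain, with eLocal matching the new member default above.
  uint32_t deviceMasks[1] = { 0x1 };
  vk::DeviceGroupPresentInfoKHR deviceGroupPresent( 1, deviceMasks,
                                                    vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
  presentInfo.pNext = &deviceGroupPresent;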
- namespace layout
+ struct DeviceGroupRenderPassBeginInfo
{
- struct DeviceGroupRenderPassBeginInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0,
- uint32_t deviceRenderAreaCount_ = 0,
- const vk::Rect2D* pDeviceRenderAreas_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : deviceMask( deviceMask_ )
- , deviceRenderAreaCount( deviceRenderAreaCount_ )
- , pDeviceRenderAreas( pDeviceRenderAreas_ )
- {}
-
- DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>(this) = rhs;
- }
-
- DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
- const void* pNext = nullptr;
- uint32_t deviceMask;
- uint32_t deviceRenderAreaCount;
- const vk::Rect2D* pDeviceRenderAreas;
- };
- static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGroupRenderPassBeginInfo : public layout::DeviceGroupRenderPassBeginInfo
- {
- VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = 0,
- uint32_t deviceRenderAreaCount_ = 0,
- const vk::Rect2D* pDeviceRenderAreas_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupRenderPassBeginInfo( deviceMask_, deviceRenderAreaCount_, pDeviceRenderAreas_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {},
+ uint32_t deviceRenderAreaCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {} ) VULKAN_HPP_NOEXCEPT
+ : deviceMask( deviceMask_ )
+ , deviceRenderAreaCount( deviceRenderAreaCount_ )
+ , pDeviceRenderAreas( pDeviceRenderAreas_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) );
+ return *this;
+ }
+
DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupRenderPassBeginInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGroupRenderPassBeginInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>(&rhs);
return *this;
}
@@ -31310,7 +29897,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const vk::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceRenderAreas = pDeviceRenderAreas_;
return *this;
@@ -31340,73 +29927,46 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGroupRenderPassBeginInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
+ const void* pNext = {};
+ uint32_t deviceMask = {};
+ uint32_t deviceRenderAreaCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {};
};
static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DeviceGroupSubmitInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0,
- const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr,
- uint32_t commandBufferCount_ = 0,
- const uint32_t* pCommandBufferDeviceMasks_ = nullptr,
- uint32_t signalSemaphoreCount_ = 0,
- const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
- , commandBufferCount( commandBufferCount_ )
- , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
- , signalSemaphoreCount( signalSemaphoreCount_ )
- , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
- {}
-
- DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupSubmitInfo*>(this) = rhs;
- }
-
- DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupSubmitInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
- const void* pNext = nullptr;
- uint32_t waitSemaphoreCount;
- const uint32_t* pWaitSemaphoreDeviceIndices;
- uint32_t commandBufferCount;
- const uint32_t* pCommandBufferDeviceMasks;
- uint32_t signalSemaphoreCount;
- const uint32_t* pSignalSemaphoreDeviceIndices;
- };
- static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGroupSubmitInfo : public layout::DeviceGroupSubmitInfo
+ struct DeviceGroupSubmitInfo
{
- VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = 0,
- const uint32_t* pWaitSemaphoreDeviceIndices_ = nullptr,
- uint32_t commandBufferCount_ = 0,
- const uint32_t* pCommandBufferDeviceMasks_ = nullptr,
- uint32_t signalSemaphoreCount_ = 0,
- const uint32_t* pSignalSemaphoreDeviceIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupSubmitInfo( waitSemaphoreCount_, pWaitSemaphoreDeviceIndices_, commandBufferCount_, pCommandBufferDeviceMasks_, signalSemaphoreCount_, pSignalSemaphoreDeviceIndices_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {},
+ const uint32_t* pWaitSemaphoreDeviceIndices_ = {},
+ uint32_t commandBufferCount_ = {},
+ const uint32_t* pCommandBufferDeviceMasks_ = {},
+ uint32_t signalSemaphoreCount_ = {},
+ const uint32_t* pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
+ , commandBufferCount( commandBufferCount_ )
+ , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
+ , signalSemaphoreCount( signalSemaphoreCount_ )
+ , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) - offsetof( DeviceGroupSubmitInfo, pNext ) );
+ return *this;
+ }
+
DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupSubmitInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGroupSubmitInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>(&rhs);
return *this;
}
@@ -31479,53 +30039,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGroupSubmitInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
+ const void* pNext = {};
+ uint32_t waitSemaphoreCount = {};
+ const uint32_t* pWaitSemaphoreDeviceIndices = {};
+ uint32_t commandBufferCount = {};
+ const uint32_t* pCommandBufferDeviceMasks = {};
+ uint32_t signalSemaphoreCount = {};
+ const uint32_t* pSignalSemaphoreDeviceIndices = {};
};
static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DeviceGroupSwapchainCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( vk::DeviceGroupPresentModeFlagsKHR modes_ = vk::DeviceGroupPresentModeFlagsKHR() ) VULKAN_HPP_NOEXCEPT
- : modes( modes_ )
- {}
-
- DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>(this) = rhs;
- }
-
- DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
- const void* pNext = nullptr;
- vk::DeviceGroupPresentModeFlagsKHR modes;
- };
- static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceGroupSwapchainCreateInfoKHR : public layout::DeviceGroupSwapchainCreateInfoKHR
+ struct DeviceGroupSwapchainCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( vk::DeviceGroupPresentModeFlagsKHR modes_ = vk::DeviceGroupPresentModeFlagsKHR() ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupSwapchainCreateInfoKHR( modes_ )
+ VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : modes( modes_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceGroupSwapchainCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceGroupSwapchainCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -31535,7 +30081,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceGroupSwapchainCreateInfoKHR & setModes( vk::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
+ DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
{
modes = modes_;
return *this;
@@ -31563,53 +30109,99 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceGroupSwapchainCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
};
static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DeviceMemoryOpaqueCaptureAddressInfo
{
- struct DeviceMemoryOverallocationCreateInfoAMD
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & operator=( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( vk::MemoryOverallocationBehaviorAMD overallocationBehavior_ = vk::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
- : overallocationBehavior( overallocationBehavior_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) - offsetof( DeviceMemoryOpaqueCaptureAddressInfo, pNext ) );
+ return *this;
+ }
- DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>(this) = rhs;
- }
+ DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>(this) = rhs;
- return *this;
- }
+ DeviceMemoryOpaqueCaptureAddressInfo& operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>(&rhs);
+ return *this;
+ }
- public:
- vk::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
- const void* pNext = nullptr;
- vk::MemoryOverallocationBehaviorAMD overallocationBehavior;
- };
- static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "layout struct and wrapper have different size!" );
- }
+ DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
- struct DeviceMemoryOverallocationCreateInfoAMD : public layout::DeviceMemoryOverallocationCreateInfoAMD
+ DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memory = memory_;
+ return *this;
+ }
+
+ operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+ }
+
+ operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
+ }
+
+ bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memory == rhs.memory );
+ }
+
+ bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ };
+ static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<DeviceMemoryOpaqueCaptureAddressInfo>::value, "struct wrapper is not a standard layout!" );
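+  // Usage sketch for this newly added Vulkan 1.2 struct, assuming "device" and "memory"
+  // are valid handles and the memory was allocated with
+  // MemoryAllocateFlagBits::eDeviceAddressCaptureReplay (capture/replay tooling):
+  vk::DeviceMemoryOpaqueCaptureAddressInfo addressInfo( memory );
+  uint64_t opaqueAddress = device.getMemoryOpaqueCaptureAddress( addressInfo );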
+
+ struct DeviceMemoryOverallocationCreateInfoAMD
{
- VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( vk::MemoryOverallocationBehaviorAMD overallocationBehavior_ = vk::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceMemoryOverallocationCreateInfoAMD( overallocationBehavior_ )
+ VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
+ : overallocationBehavior( overallocationBehavior_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) - offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) );
+ return *this;
+ }
+
DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceMemoryOverallocationCreateInfoAMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceMemoryOverallocationCreateInfoAMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>(&rhs);
return *this;
}
@@ -31619,7 +30211,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( vk::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
+ DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
{
overallocationBehavior = overallocationBehavior_;
return *this;
@@ -31647,53 +30239,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceMemoryOverallocationCreateInfoAMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
};
static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DeviceQueueGlobalPriorityCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( vk::QueueGlobalPriorityEXT globalPriority_ = vk::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT
- : globalPriority( globalPriority_ )
- {}
-
- DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>(this) = rhs;
- }
-
- DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
- const void* pNext = nullptr;
- vk::QueueGlobalPriorityEXT globalPriority;
- };
- static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceQueueGlobalPriorityCreateInfoEXT : public layout::DeviceQueueGlobalPriorityCreateInfoEXT
+ struct DeviceQueueGlobalPriorityCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( vk::QueueGlobalPriorityEXT globalPriority_ = vk::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceQueueGlobalPriorityCreateInfoEXT( globalPriority_ )
+ VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT
+ : globalPriority( globalPriority_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT ) - offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceQueueGlobalPriorityCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceQueueGlobalPriorityCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -31703,7 +30276,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( vk::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
{
globalPriority = globalPriority_;
return *this;
@@ -31731,61 +30304,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceQueueGlobalPriorityCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow;
};
static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DeviceQueueInfo2
{
- struct DeviceQueueInfo2
- {
- protected:
- VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( vk::DeviceQueueCreateFlags flags_ = vk::DeviceQueueCreateFlags(),
- uint32_t queueFamilyIndex_ = 0,
- uint32_t queueIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queueFamilyIndex( queueFamilyIndex_ )
- , queueIndex( queueIndex_ )
- {}
-
- DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceQueueInfo2*>(this) = rhs;
- }
-
- DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDeviceQueueInfo2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDeviceQueueInfo2;
- const void* pNext = nullptr;
- vk::DeviceQueueCreateFlags flags;
- uint32_t queueFamilyIndex;
- uint32_t queueIndex;
- };
- static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "layout struct and wrapper have different size!" );
- }
-
- struct DeviceQueueInfo2 : public layout::DeviceQueueInfo2
- {
- VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( vk::DeviceQueueCreateFlags flags_ = vk::DeviceQueueCreateFlags(),
- uint32_t queueFamilyIndex_ = 0,
- uint32_t queueIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceQueueInfo2( flags_, queueFamilyIndex_, queueIndex_ )
+ VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
+ uint32_t queueFamilyIndex_ = {},
+ uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , queueFamilyIndex( queueFamilyIndex_ )
+ , queueIndex( queueIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & operator=( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) - offsetof( DeviceQueueInfo2, pNext ) );
+ return *this;
+ }
+
DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DeviceQueueInfo2( rhs )
- {}
+ {
+ *this = rhs;
+ }
DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DeviceQueueInfo2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>(&rhs);
return *this;
}
@@ -31795,7 +30345,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DeviceQueueInfo2 & setFlags( vk::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -31837,17 +30387,21 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DeviceQueueInfo2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+ uint32_t queueFamilyIndex = {};
+ uint32_t queueIndex = {};
};
static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
struct DispatchIndirectCommand
{
- VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = 0,
- uint32_t y_ = 0,
- uint32_t z_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {},
+ uint32_t y_ = {},
+ uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
, z( z_ )
@@ -31855,12 +30409,12 @@ namespace VULKAN_HPP_NAMESPACE
DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDispatchIndirectCommand*>(this) = rhs;
+ *this = rhs;
}
DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDispatchIndirectCommand*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>(&rhs);
return *this;
}
@@ -31905,54 +30459,33 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t x;
- uint32_t y;
- uint32_t z;
+ uint32_t x = {};
+ uint32_t y = {};
+ uint32_t z = {};
};
static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
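Throughout the diff, defaulted parameters and members change from `= 0` to `= {}`: for scalars, value-initialization yields zero, so behavior is unchanged, while the brace form also covers handles, flags, and nested structs uniformly. A quick check of that equivalence, assuming vulkan.hpp is included with its default `vk` namespace alias:

    #include <cassert>
    #include <vulkan/vulkan.hpp>

    int main()
    {
      // x_, y_, z_ now default to {}; for uint32_t that is value-initialization,
      // i.e. zero, identical to the old `= 0` defaults.
      vk::DispatchIndirectCommand cmd;
      assert( cmd.x == 0 && cmd.y == 0 && cmd.z == 0 );
    }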
static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DisplayEventInfoEXT
{
- struct DisplayEventInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( vk::DisplayEventTypeEXT displayEvent_ = vk::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
- : displayEvent( displayEvent_ )
- {}
-
- DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayEventInfoEXT*>(this) = rhs;
- }
-
- DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayEventInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayEventInfoEXT;
- const void* pNext = nullptr;
- vk::DisplayEventTypeEXT displayEvent;
- };
- static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayEventInfoEXT : public layout::DisplayEventInfoEXT
- {
- VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( vk::DisplayEventTypeEXT displayEvent_ = vk::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayEventInfoEXT( displayEvent_ )
+ VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
+ : displayEvent( displayEvent_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) - offsetof( DisplayEventInfoEXT, pNext ) );
+ return *this;
+ }
+
DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayEventInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayEventInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>(&rhs);
return *this;
}
@@ -31962,7 +30495,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DisplayEventInfoEXT & setDisplayEvent( vk::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
+ DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
{
displayEvent = displayEvent_;
return *this;
@@ -31990,32 +30523,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayEventInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
};
static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
struct DisplayModeParametersKHR
{
- VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( vk::Extent2D visibleRegion_ = vk::Extent2D(),
- uint32_t refreshRate_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {},
+ uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
: visibleRegion( visibleRegion_ )
, refreshRate( refreshRate_ )
{}
DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayModeParametersKHR*>(this) = rhs;
+ *this = rhs;
}
DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayModeParametersKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>(&rhs);
return *this;
}
- DisplayModeParametersKHR & setVisibleRegion( vk::Extent2D visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ ) VULKAN_HPP_NOEXCEPT
{
visibleRegion = visibleRegion_;
return *this;
@@ -32049,57 +30584,34 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Extent2D visibleRegion;
- uint32_t refreshRate;
+ VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
+ uint32_t refreshRate = {};
};
static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DisplayModeCreateInfoKHR
{
- struct DisplayModeCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( vk::DisplayModeCreateFlagsKHR flags_ = vk::DisplayModeCreateFlagsKHR(),
- vk::DisplayModeParametersKHR parameters_ = vk::DisplayModeParametersKHR() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , parameters( parameters_ )
- {}
-
- DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayModeCreateInfoKHR*>(this) = rhs;
- }
-
- DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayModeCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
- const void* pNext = nullptr;
- vk::DisplayModeCreateFlagsKHR flags;
- vk::DisplayModeParametersKHR parameters;
- };
- static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayModeCreateInfoKHR : public layout::DisplayModeCreateInfoKHR
- {
- VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( vk::DisplayModeCreateFlagsKHR flags_ = vk::DisplayModeCreateFlagsKHR(),
- vk::DisplayModeParametersKHR parameters_ = vk::DisplayModeParametersKHR() ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayModeCreateInfoKHR( flags_, parameters_ )
+ VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , parameters( parameters_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) - offsetof( DisplayModeCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayModeCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayModeCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -32109,13 +30621,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DisplayModeCreateInfoKHR & setFlags( vk::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- DisplayModeCreateInfoKHR & setParameters( vk::DisplayModeParametersKHR parameters_ ) VULKAN_HPP_NOEXCEPT
+ DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ ) VULKAN_HPP_NOEXCEPT
{
parameters = parameters_;
return *this;
@@ -32144,25 +30656,31 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayModeCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
};
static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
struct DisplayModePropertiesKHR
{
- DisplayModePropertiesKHR() VULKAN_HPP_NOEXCEPT
+ DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
+ VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
+ : displayMode( displayMode_ )
+ , parameters( parameters_ )
{}
DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayModePropertiesKHR*>(this) = rhs;
+ *this = rhs;
}
DisplayModePropertiesKHR& operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayModePropertiesKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>(&rhs);
return *this;
}
@@ -32188,52 +30706,32 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DisplayModeKHR displayMode;
- vk::DisplayModeParametersKHR parameters;
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
};
static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DisplayModeProperties2KHR
- {
- protected:
- DisplayModeProperties2KHR() VULKAN_HPP_NOEXCEPT
- {}
-
- DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayModeProperties2KHR*>(this) = rhs;
- }
-
- DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayModeProperties2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayModeProperties2KHR;
- void* pNext = nullptr;
- vk::DisplayModePropertiesKHR displayModeProperties;
- };
- static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayModeProperties2KHR : public layout::DisplayModeProperties2KHR
+ struct DisplayModeProperties2KHR
{
- DisplayModeProperties2KHR() VULKAN_HPP_NOEXCEPT
- : layout::DisplayModeProperties2KHR()
+ DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : displayModeProperties( displayModeProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) - offsetof( DisplayModeProperties2KHR, pNext ) );
+ return *this;
+ }
+
DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayModeProperties2KHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayModeProperties2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>(&rhs);
return *this;
}
@@ -32259,52 +30757,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayModeProperties2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
};
static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DisplayNativeHdrSurfaceCapabilitiesAMD
{
- struct DisplayNativeHdrSurfaceCapabilitiesAMD
- {
- protected:
- DisplayNativeHdrSurfaceCapabilitiesAMD() VULKAN_HPP_NOEXCEPT
- {}
-
- DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>(this) = rhs;
- }
-
- DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
- void* pNext = nullptr;
- vk::Bool32 localDimmingSupport;
- };
- static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayNativeHdrSurfaceCapabilitiesAMD : public layout::DisplayNativeHdrSurfaceCapabilitiesAMD
- {
- DisplayNativeHdrSurfaceCapabilitiesAMD() VULKAN_HPP_NOEXCEPT
- : layout::DisplayNativeHdrSurfaceCapabilitiesAMD()
+ DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT
+ : localDimmingSupport( localDimmingSupport_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) - offsetof( DisplayNativeHdrSurfaceCapabilitiesAMD, pNext ) );
+ return *this;
+ }
+
DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayNativeHdrSurfaceCapabilitiesAMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayNativeHdrSurfaceCapabilitiesAMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>(&rhs);
return *this;
}
@@ -32330,25 +30810,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayNativeHdrSurfaceCapabilitiesAMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
};
static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
struct DisplayPlaneCapabilitiesKHR
{
- DisplayPlaneCapabilitiesKHR() VULKAN_HPP_NOEXCEPT
+ DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {},
+ VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT
+ : supportedAlpha( supportedAlpha_ )
+ , minSrcPosition( minSrcPosition_ )
+ , maxSrcPosition( maxSrcPosition_ )
+ , minSrcExtent( minSrcExtent_ )
+ , maxSrcExtent( maxSrcExtent_ )
+ , minDstPosition( minDstPosition_ )
+ , maxDstPosition( maxDstPosition_ )
+ , minDstExtent( minDstExtent_ )
+ , maxDstExtent( maxDstExtent_ )
{}
DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>(this) = rhs;
+ *this = rhs;
}
DisplayPlaneCapabilitiesKHR& operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>(&rhs);
return *this;
}
@@ -32381,59 +30880,39 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DisplayPlaneAlphaFlagsKHR supportedAlpha;
- vk::Offset2D minSrcPosition;
- vk::Offset2D maxSrcPosition;
- vk::Extent2D minSrcExtent;
- vk::Extent2D maxSrcExtent;
- vk::Offset2D minDstPosition;
- vk::Offset2D maxDstPosition;
- vk::Extent2D minDstExtent;
- vk::Extent2D maxDstExtent;
+ VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
+ VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
+ VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
+ VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
+ VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
};
static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DisplayPlaneCapabilities2KHR
- {
- protected:
- DisplayPlaneCapabilities2KHR() VULKAN_HPP_NOEXCEPT
- {}
-
- DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>(this) = rhs;
- }
-
- DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
- void* pNext = nullptr;
- vk::DisplayPlaneCapabilitiesKHR capabilities;
- };
- static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayPlaneCapabilities2KHR : public layout::DisplayPlaneCapabilities2KHR
+ struct DisplayPlaneCapabilities2KHR
{
- DisplayPlaneCapabilities2KHR() VULKAN_HPP_NOEXCEPT
- : layout::DisplayPlaneCapabilities2KHR()
+ DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT
+ : capabilities( capabilities_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) - offsetof( DisplayPlaneCapabilities2KHR, pNext ) );
+ return *this;
+ }
+
DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayPlaneCapabilities2KHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayPlaneCapabilities2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>(&rhs);
return *this;
}
@@ -32459,57 +30938,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayPlaneCapabilities2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
};
static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DisplayPlaneInfo2KHR
{
- struct DisplayPlaneInfo2KHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( vk::DisplayModeKHR mode_ = vk::DisplayModeKHR(),
- uint32_t planeIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : mode( mode_ )
- , planeIndex( planeIndex_ )
- {}
-
- DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPlaneInfo2KHR*>(this) = rhs;
- }
-
- DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPlaneInfo2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
- const void* pNext = nullptr;
- vk::DisplayModeKHR mode;
- uint32_t planeIndex;
- };
- static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayPlaneInfo2KHR : public layout::DisplayPlaneInfo2KHR
- {
- VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( vk::DisplayModeKHR mode_ = vk::DisplayModeKHR(),
- uint32_t planeIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayPlaneInfo2KHR( mode_, planeIndex_ )
+ VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {},
+ uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : mode( mode_ )
+ , planeIndex( planeIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) - offsetof( DisplayPlaneInfo2KHR, pNext ) );
+ return *this;
+ }
+
DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayPlaneInfo2KHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayPlaneInfo2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>(&rhs);
return *this;
}
@@ -32519,7 +30977,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DisplayPlaneInfo2KHR & setMode( vk::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
{
mode = mode_;
return *this;
@@ -32554,25 +31012,31 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayPlaneInfo2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
+ uint32_t planeIndex = {};
};
static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
struct DisplayPlanePropertiesKHR
{
- DisplayPlanePropertiesKHR() VULKAN_HPP_NOEXCEPT
+ DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {},
+ uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : currentDisplay( currentDisplay_ )
+ , currentStackIndex( currentStackIndex_ )
{}
DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayPlanePropertiesKHR*>(this) = rhs;
+ *this = rhs;
}
DisplayPlanePropertiesKHR& operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayPlanePropertiesKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>(&rhs);
return *this;
}
@@ -32598,52 +31062,32 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DisplayKHR currentDisplay;
- uint32_t currentStackIndex;
+ VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
+ uint32_t currentStackIndex = {};
};
static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DisplayPlaneProperties2KHR
- {
- protected:
- DisplayPlaneProperties2KHR() VULKAN_HPP_NOEXCEPT
- {}
-
- DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPlaneProperties2KHR*>(this) = rhs;
- }
-
- DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPlaneProperties2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
- void* pNext = nullptr;
- vk::DisplayPlanePropertiesKHR displayPlaneProperties;
- };
- static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayPlaneProperties2KHR : public layout::DisplayPlaneProperties2KHR
+ struct DisplayPlaneProperties2KHR
{
- DisplayPlaneProperties2KHR() VULKAN_HPP_NOEXCEPT
- : layout::DisplayPlaneProperties2KHR()
+ DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : displayPlaneProperties( displayPlaneProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) - offsetof( DisplayPlaneProperties2KHR, pNext ) );
+ return *this;
+ }
+
DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayPlaneProperties2KHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayPlaneProperties2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>(&rhs);
return *this;
}
@@ -32669,53 +31113,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayPlaneProperties2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
};
static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DisplayPowerInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( vk::DisplayPowerStateEXT powerState_ = vk::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT
- : powerState( powerState_ )
- {}
-
- DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPowerInfoEXT*>(this) = rhs;
- }
-
- DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPowerInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayPowerInfoEXT;
- const void* pNext = nullptr;
- vk::DisplayPowerStateEXT powerState;
- };
- static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayPowerInfoEXT : public layout::DisplayPowerInfoEXT
+ struct DisplayPowerInfoEXT
{
- VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( vk::DisplayPowerStateEXT powerState_ = vk::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayPowerInfoEXT( powerState_ )
+ VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT
+ : powerState( powerState_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) - offsetof( DisplayPowerInfoEXT, pNext ) );
+ return *this;
+ }
+
DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayPowerInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayPowerInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>(&rhs);
return *this;
}
@@ -32725,7 +31150,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DisplayPowerInfoEXT & setPowerState( vk::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
{
powerState = powerState_;
return *this;
@@ -32753,61 +31178,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayPowerInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
};
static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
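The converting constructor and the `operator=` taking the C struct now both funnel through the same memcpy-based assignment via `reinterpret_cast`, which the size and standard-layout static_asserts back up. A small round-trip sketch, assuming vulkan.hpp with the default `vk` namespace alias:

    #include <vulkan/vulkan.hpp>

    int main()
    {
      vk::DisplayPowerInfoEXT info( vk::DisplayPowerStateEXT::eOn );

      // Reinterpret to the C type, as the wrappers themselves do; the
      // static_asserts above guarantee matching size and layout.
      VkDisplayPowerInfoEXT cInfo = *reinterpret_cast<VkDisplayPowerInfoEXT*>( &info );

      // And back again through the converting constructor shown in the diff.
      vk::DisplayPowerInfoEXT roundTripped( cInfo );
      (void)roundTripped;
    }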
- namespace layout
- {
- struct DisplayPresentInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( vk::Rect2D srcRect_ = vk::Rect2D(),
- vk::Rect2D dstRect_ = vk::Rect2D(),
- vk::Bool32 persistent_ = 0 ) VULKAN_HPP_NOEXCEPT
- : srcRect( srcRect_ )
- , dstRect( dstRect_ )
- , persistent( persistent_ )
- {}
-
- DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPresentInfoKHR*>(this) = rhs;
- }
-
- DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayPresentInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayPresentInfoKHR;
- const void* pNext = nullptr;
- vk::Rect2D srcRect;
- vk::Rect2D dstRect;
- vk::Bool32 persistent;
- };
- static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayPresentInfoKHR : public layout::DisplayPresentInfoKHR
+ struct DisplayPresentInfoKHR
{
- VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( vk::Rect2D srcRect_ = vk::Rect2D(),
- vk::Rect2D dstRect_ = vk::Rect2D(),
- vk::Bool32 persistent_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayPresentInfoKHR( srcRect_, dstRect_, persistent_ )
+ VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {},
+ VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT
+ : srcRect( srcRect_ )
+ , dstRect( dstRect_ )
+ , persistent( persistent_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) - offsetof( DisplayPresentInfoKHR, pNext ) );
+ return *this;
+ }
+
DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayPresentInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayPresentInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>(&rhs);
return *this;
}
@@ -32817,19 +31219,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DisplayPresentInfoKHR & setSrcRect( vk::Rect2D srcRect_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ ) VULKAN_HPP_NOEXCEPT
{
srcRect = srcRect_;
return *this;
}
- DisplayPresentInfoKHR & setDstRect( vk::Rect2D dstRect_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D dstRect_ ) VULKAN_HPP_NOEXCEPT
{
dstRect = dstRect_;
return *this;
}
- DisplayPresentInfoKHR & setPersistent( vk::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
+ DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
{
persistent = persistent_;
return *this;
@@ -32859,25 +31261,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayPresentInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
+ VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
+ VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
};
static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
struct DisplayPropertiesKHR
{
- DisplayPropertiesKHR() VULKAN_HPP_NOEXCEPT
+ DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {},
+ const char* displayName_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT
+ : display( display_ )
+ , displayName( displayName_ )
+ , physicalDimensions( physicalDimensions_ )
+ , physicalResolution( physicalResolution_ )
+ , supportedTransforms( supportedTransforms_ )
+ , planeReorderPossible( planeReorderPossible_ )
+ , persistentContent( persistentContent_ )
{}
DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayPropertiesKHR*>(this) = rhs;
+ *this = rhs;
}
DisplayPropertiesKHR& operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDisplayPropertiesKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>(&rhs);
return *this;
}
@@ -32908,57 +31327,37 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DisplayKHR display;
- const char* displayName;
- vk::Extent2D physicalDimensions;
- vk::Extent2D physicalResolution;
- vk::SurfaceTransformFlagsKHR supportedTransforms;
- vk::Bool32 planeReorderPossible;
- vk::Bool32 persistentContent;
+ VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
+ const char* displayName = {};
+ VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
+ VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+ VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
+ VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
};
static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct DisplayProperties2KHR
- {
- protected:
- DisplayProperties2KHR() VULKAN_HPP_NOEXCEPT
- {}
-
- DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayProperties2KHR*>(this) = rhs;
- }
-
- DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplayProperties2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplayProperties2KHR;
- void* pNext = nullptr;
- vk::DisplayPropertiesKHR displayProperties;
- };
- static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplayProperties2KHR : public layout::DisplayProperties2KHR
+ struct DisplayProperties2KHR
{
- DisplayProperties2KHR() VULKAN_HPP_NOEXCEPT
- : layout::DisplayProperties2KHR()
+ DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : displayProperties( displayProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplayProperties2KHR & operator=( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) - offsetof( DisplayProperties2KHR, pNext ) );
+ return *this;
+ }
+
DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplayProperties2KHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplayProperties2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>(&rhs);
return *this;
}
@@ -32984,81 +31383,48 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplayProperties2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
};
static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DisplaySurfaceCreateInfoKHR
{
- struct DisplaySurfaceCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( vk::DisplaySurfaceCreateFlagsKHR flags_ = vk::DisplaySurfaceCreateFlagsKHR(),
- vk::DisplayModeKHR displayMode_ = vk::DisplayModeKHR(),
- uint32_t planeIndex_ = 0,
- uint32_t planeStackIndex_ = 0,
- vk::SurfaceTransformFlagBitsKHR transform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity,
- float globalAlpha_ = 0,
- vk::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
- vk::Extent2D imageExtent_ = vk::Extent2D() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , displayMode( displayMode_ )
- , planeIndex( planeIndex_ )
- , planeStackIndex( planeStackIndex_ )
- , transform( transform_ )
- , globalAlpha( globalAlpha_ )
- , alphaMode( alphaMode_ )
- , imageExtent( imageExtent_ )
- {}
-
- DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>(this) = rhs;
- }
-
- DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
- const void* pNext = nullptr;
- vk::DisplaySurfaceCreateFlagsKHR flags;
- vk::DisplayModeKHR displayMode;
- uint32_t planeIndex;
- uint32_t planeStackIndex;
- vk::SurfaceTransformFlagBitsKHR transform;
- float globalAlpha;
- vk::DisplayPlaneAlphaFlagBitsKHR alphaMode;
- vk::Extent2D imageExtent;
- };
- static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct DisplaySurfaceCreateInfoKHR : public layout::DisplaySurfaceCreateInfoKHR
- {
- VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( vk::DisplaySurfaceCreateFlagsKHR flags_ = vk::DisplaySurfaceCreateFlagsKHR(),
- vk::DisplayModeKHR displayMode_ = vk::DisplayModeKHR(),
- uint32_t planeIndex_ = 0,
- uint32_t planeStackIndex_ = 0,
- vk::SurfaceTransformFlagBitsKHR transform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity,
- float globalAlpha_ = 0,
- vk::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
- vk::Extent2D imageExtent_ = vk::Extent2D() ) VULKAN_HPP_NOEXCEPT
- : layout::DisplaySurfaceCreateInfoKHR( flags_, displayMode_, planeIndex_, planeStackIndex_, transform_, globalAlpha_, alphaMode_, imageExtent_ )
+ VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
+ uint32_t planeIndex_ = {},
+ uint32_t planeStackIndex_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ float globalAlpha_ = {},
+ VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , displayMode( displayMode_ )
+ , planeIndex( planeIndex_ )
+ , planeStackIndex( planeStackIndex_ )
+ , transform( transform_ )
+ , globalAlpha( globalAlpha_ )
+ , alphaMode( alphaMode_ )
+ , imageExtent( imageExtent_ )
{}
+ VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) - offsetof( DisplaySurfaceCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DisplaySurfaceCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DisplaySurfaceCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -33068,13 +31434,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DisplaySurfaceCreateInfoKHR & setFlags( vk::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setDisplayMode( vk::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
{
displayMode = displayMode_;
return *this;
@@ -33092,7 +31458,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DisplaySurfaceCreateInfoKHR & setTransform( vk::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
{
transform = transform_;
return *this;
@@ -33104,13 +31470,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- DisplaySurfaceCreateInfoKHR & setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
{
alphaMode = alphaMode_;
return *this;
}
- DisplaySurfaceCreateInfoKHR & setImageExtent( vk::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
@@ -33134,9 +31500,9 @@ namespace VULKAN_HPP_NAMESPACE
&& ( displayMode == rhs.displayMode )
&& ( planeIndex == rhs.planeIndex )
&& ( planeStackIndex == rhs.planeStackIndex )
- && vk::operator==( transform, rhs.transform )
+ && ( transform == rhs.transform )
&& ( globalAlpha == rhs.globalAlpha )
- && vk::operator==( alphaMode, rhs.alphaMode )
+ && ( alphaMode == rhs.alphaMode )
&& ( imageExtent == rhs.imageExtent );
}
@@ -33145,19 +31511,28 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DisplaySurfaceCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+ uint32_t planeIndex = {};
+ uint32_t planeStackIndex = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ float globalAlpha = {};
+ VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
};
static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
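Note also the `operator==` change in this struct: `vk::operator==( transform, rhs.transform )` becomes a plain `transform == rhs.transform`, since the flag-bits members now compare directly. The fluent setters survive the refactor unchanged; a usage sketch with illustrative values (`setPlaneIndex` and the other setters not shown sit in the elided hunks):

    #include <vulkan/vulkan.hpp>

    int main()
    {
      vk::DisplayModeKHR mode;  // in real code, obtained from the display queries

      vk::DisplaySurfaceCreateInfoKHR createInfo;
      createInfo.setDisplayMode( mode )
                .setPlaneIndex( 0 )
                .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
                .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
                .setImageExtent( vk::Extent2D( 1920, 1080 ) );
    }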
struct DrawIndexedIndirectCommand
{
- VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = 0,
- uint32_t instanceCount_ = 0,
- uint32_t firstIndex_ = 0,
- int32_t vertexOffset_ = 0,
- uint32_t firstInstance_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {},
+ uint32_t instanceCount_ = {},
+ uint32_t firstIndex_ = {},
+ int32_t vertexOffset_ = {},
+ uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
: indexCount( indexCount_ )
, instanceCount( instanceCount_ )
, firstIndex( firstIndex_ )
@@ -33167,12 +31542,12 @@ namespace VULKAN_HPP_NAMESPACE
DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDrawIndexedIndirectCommand*>(this) = rhs;
+ *this = rhs;
}
DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDrawIndexedIndirectCommand*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>(&rhs);
return *this;
}
@@ -33231,21 +31606,21 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t indexCount;
- uint32_t instanceCount;
- uint32_t firstIndex;
- int32_t vertexOffset;
- uint32_t firstInstance;
+ uint32_t indexCount = {};
+ uint32_t instanceCount = {};
+ uint32_t firstIndex = {};
+ int32_t vertexOffset = {};
+ uint32_t firstInstance = {};
};
static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
struct DrawIndirectCommand
{
- VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = 0,
- uint32_t instanceCount_ = 0,
- uint32_t firstVertex_ = 0,
- uint32_t firstInstance_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {},
+ uint32_t instanceCount_ = {},
+ uint32_t firstVertex_ = {},
+ uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
: vertexCount( vertexCount_ )
, instanceCount( instanceCount_ )
, firstVertex( firstVertex_ )
@@ -33254,12 +31629,12 @@ namespace VULKAN_HPP_NAMESPACE
DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDrawIndirectCommand*>(this) = rhs;
+ *this = rhs;
}
DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDrawIndirectCommand*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>(&rhs);
return *this;
}
@@ -33311,30 +31686,30 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t vertexCount;
- uint32_t instanceCount;
- uint32_t firstVertex;
- uint32_t firstInstance;
+ uint32_t vertexCount = {};
+ uint32_t instanceCount = {};
+ uint32_t firstVertex = {};
+ uint32_t firstInstance = {};
};
static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
struct DrawMeshTasksIndirectCommandNV
{
- VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = 0,
- uint32_t firstTask_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {},
+ uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT
: taskCount( taskCount_ )
, firstTask( firstTask_ )
{}
DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>(this) = rhs;
+ *this = rhs;
}
DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>(&rhs);
return *this;
}
@@ -33372,25 +31747,30 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t taskCount;
- uint32_t firstTask;
+ uint32_t taskCount = {};
+ uint32_t firstTask = {};
};
static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
struct DrmFormatModifierPropertiesEXT
{
- DrmFormatModifierPropertiesEXT() VULKAN_HPP_NOEXCEPT
+ DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {},
+ uint32_t drmFormatModifierPlaneCount_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifier( drmFormatModifier_ )
+ , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
+ , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
{}
DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>(this) = rhs;
+ *this = rhs;
}
DrmFormatModifierPropertiesEXT& operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>(&rhs);
return *this;
}
@@ -33417,54 +31797,35 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint64_t drmFormatModifier;
- uint32_t drmFormatModifierPlaneCount;
- vk::FormatFeatureFlags drmFormatModifierTilingFeatures;
+ uint64_t drmFormatModifier = {};
+ uint32_t drmFormatModifierPlaneCount = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
};
static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct DrmFormatModifierPropertiesListEXT
{
- struct DrmFormatModifierPropertiesListEXT
- {
- protected:
- DrmFormatModifierPropertiesListEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>(this) = rhs;
- }
-
- DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
- void* pNext = nullptr;
- uint32_t drmFormatModifierCount;
- vk::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties;
- };
- static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct DrmFormatModifierPropertiesListEXT : public layout::DrmFormatModifierPropertiesListEXT
- {
- DrmFormatModifierPropertiesListEXT() VULKAN_HPP_NOEXCEPT
- : layout::DrmFormatModifierPropertiesListEXT()
+ DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {},
+ VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifierCount( drmFormatModifierCount_ )
+ , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT & operator=( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) - offsetof( DrmFormatModifierPropertiesListEXT, pNext ) );
+ return *this;
+ }
+
DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::DrmFormatModifierPropertiesListEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::DrmFormatModifierPropertiesListEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>(&rhs);
return *this;
}
@@ -33491,53 +31852,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::DrmFormatModifierPropertiesListEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
+ void* pNext = {};
+ uint32_t drmFormatModifierCount = {};
+ VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {};
};
static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
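
The new copy-assignment pattern above copies everything from pNext onward with memcpy and deliberately skips sType: sType is now a const member fixed at construction, so it must never be overwritten, and offsetof is well-defined here because the wrapper is standard layout. A reduced sketch of the same pattern with hypothetical names:

    #include <cstddef>   // offsetof
    #include <cstring>   // memcpy

    struct Info          // hypothetical stand-in for the generated wrappers
    {
      const unsigned sType   = 1;        // fixed tag, never copied
      void *         pNext   = nullptr;
      unsigned       payload = 0;

      Info & operator=( Info const & rhs ) noexcept
      {
        // Copy the tail [pNext .. end); the const sType keeps its value.
        std::memcpy( &pNext, &rhs.pNext, sizeof( Info ) - offsetof( Info, pNext ) );
        return *this;
      }
    };
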
- namespace layout
+ struct EventCreateInfo
{
- struct EventCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR EventCreateInfo( vk::EventCreateFlags flags_ = vk::EventCreateFlags() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
-
- EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkEventCreateInfo*>(this) = rhs;
- }
-
- EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkEventCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eEventCreateInfo;
- const void* pNext = nullptr;
- vk::EventCreateFlags flags;
- };
- static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct EventCreateInfo : public layout::EventCreateInfo
- {
- VULKAN_HPP_CONSTEXPR EventCreateInfo( vk::EventCreateFlags flags_ = vk::EventCreateFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::EventCreateInfo( flags_ )
+ VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
+ VULKAN_HPP_NAMESPACE::EventCreateInfo & operator=( VULKAN_HPP_NAMESPACE::EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) - offsetof( EventCreateInfo, pNext ) );
+ return *this;
+ }
+
EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::EventCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::EventCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>(&rhs);
return *this;
}
@@ -33547,7 +31890,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- EventCreateInfo & setFlags( vk::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -33575,53 +31918,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::EventCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
};
static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
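
Each set*() member returns a reference to the struct, so initialization can be chained in a single expression. A usage sketch, assuming the usual generated setPNext() alongside the setFlags() shown above:

    vk::EventCreateInfo info = vk::EventCreateInfo()
                                 .setPNext( nullptr )
                                 .setFlags( vk::EventCreateFlags() );
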
- namespace layout
+ struct ExportFenceCreateInfo
{
- struct ExportFenceCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( vk::ExternalFenceHandleTypeFlags handleTypes_ = vk::ExternalFenceHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
-
- ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportFenceCreateInfo*>(this) = rhs;
- }
-
- ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportFenceCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExportFenceCreateInfo;
- const void* pNext = nullptr;
- vk::ExternalFenceHandleTypeFlags handleTypes;
- };
- static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ExportFenceCreateInfo : public layout::ExportFenceCreateInfo
- {
- VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( vk::ExternalFenceHandleTypeFlags handleTypes_ = vk::ExternalFenceHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::ExportFenceCreateInfo( handleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
+ VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) - offsetof( ExportFenceCreateInfo, pNext ) );
+ return *this;
+ }
+
ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExportFenceCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExportFenceCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>(&rhs);
return *this;
}
@@ -33631,7 +31955,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ExportFenceCreateInfo & setHandleTypes( vk::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
@@ -33659,63 +31983,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExportFenceCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
};
static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
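
Every wrapper gains the same pair of conversions from its C counterpart: the VkX constructor forwards to operator=( VkX const & ), which reinterprets the VkX as the wrapper and then runs the memcpy-based assignment above. A usage sketch for adopting a struct filled in by C code (field values are illustrative):

    VkExportFenceCreateInfo cInfo{};
    cInfo.sType       = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO;
    cInfo.handleTypes = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT;

    vk::ExportFenceCreateInfo hppInfo( cInfo );  // reinterpret + member copy
    // hppInfo.handleTypes now mirrors cInfo.handleTypes; sType stays const.
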
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct ExportFenceWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
- DWORD dwAccess_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
- , dwAccess( dwAccess_ )
- , name( name_ )
- {}
-
- ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>(this) = rhs;
- }
-
- ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
- const void* pNext = nullptr;
- const SECURITY_ATTRIBUTES* pAttributes;
- DWORD dwAccess;
- LPCWSTR name;
- };
- static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ExportFenceWin32HandleInfoKHR : public layout::ExportFenceWin32HandleInfoKHR
+ struct ExportFenceWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
- DWORD dwAccess_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ExportFenceWin32HandleInfoKHR( pAttributes_, dwAccess_, name_ )
+ VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {},
+ DWORD dwAccess_ = {},
+ LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pAttributes( pAttributes_ )
+ , dwAccess( dwAccess_ )
+ , name( name_ )
{}
+ VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) - offsetof( ExportFenceWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExportFenceWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExportFenceWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -33767,54 +32068,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExportFenceWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
+ const void* pNext = {};
+ const SECURITY_ATTRIBUTES* pAttributes = {};
+ DWORD dwAccess = {};
+ LPCWSTR name = {};
};
static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
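
The Win32 handle-info wrappers above (and the further ones below) compile only under VK_USE_PLATFORM_WIN32_KHR, which also pulls in <windows.h> for SECURITY_ATTRIBUTES, DWORD and LPCWSTR. The macro has to be defined before the header is included:

    // Build-time opt-in for the Win32 external-handle wrappers:
    #define VK_USE_PLATFORM_WIN32_KHR
    #include <vulkan/vulkan.hpp>
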
- namespace layout
+ struct ExportMemoryAllocateInfo
{
- struct ExportMemoryAllocateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
-
- ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportMemoryAllocateInfo*>(this) = rhs;
- }
-
- ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportMemoryAllocateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExportMemoryAllocateInfo;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlags handleTypes;
- };
- static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ExportMemoryAllocateInfo : public layout::ExportMemoryAllocateInfo
- {
- VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::ExportMemoryAllocateInfo( handleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
+ VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) - offsetof( ExportMemoryAllocateInfo, pNext ) );
+ return *this;
+ }
+
ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExportMemoryAllocateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExportMemoryAllocateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>(&rhs);
return *this;
}
@@ -33824,7 +32108,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ExportMemoryAllocateInfo & setHandleTypes( vk::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
@@ -33852,53 +32136,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExportMemoryAllocateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ExportMemoryAllocateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ = vk::ExternalMemoryHandleTypeFlagsNV() ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
-
- ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportMemoryAllocateInfoNV*>(this) = rhs;
- }
-
- ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportMemoryAllocateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlagsNV handleTypes;
- };
- static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct ExportMemoryAllocateInfoNV : public layout::ExportMemoryAllocateInfoNV
+ struct ExportMemoryAllocateInfoNV
{
- VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ = vk::ExternalMemoryHandleTypeFlagsNV() ) VULKAN_HPP_NOEXCEPT
- : layout::ExportMemoryAllocateInfoNV( handleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
+ VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) - offsetof( ExportMemoryAllocateInfoNV, pNext ) );
+ return *this;
+ }
+
ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExportMemoryAllocateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExportMemoryAllocateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>(&rhs);
return *this;
}
@@ -33908,7 +32173,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ExportMemoryAllocateInfoNV & setHandleTypes( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
@@ -33936,63 +32201,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExportMemoryAllocateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
};
static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct ExportMemoryWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
- DWORD dwAccess_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
- , dwAccess( dwAccess_ )
- , name( name_ )
- {}
-
- ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>(this) = rhs;
- }
-
- ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
- const void* pNext = nullptr;
- const SECURITY_ATTRIBUTES* pAttributes;
- DWORD dwAccess;
- LPCWSTR name;
- };
- static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ExportMemoryWin32HandleInfoKHR : public layout::ExportMemoryWin32HandleInfoKHR
+ struct ExportMemoryWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
- DWORD dwAccess_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ExportMemoryWin32HandleInfoKHR( pAttributes_, dwAccess_, name_ )
+ VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {},
+ DWORD dwAccess_ = {},
+ LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pAttributes( pAttributes_ )
+ , dwAccess( dwAccess_ )
+ , name( name_ )
{}
+ VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) - offsetof( ExportMemoryWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExportMemoryWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExportMemoryWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -34044,8 +32286,12 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExportMemoryWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
+ const void* pNext = {};
+ const SECURITY_ATTRIBUTES* pAttributes = {};
+ DWORD dwAccess = {};
+ LPCWSTR name = {};
};
static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
@@ -34053,51 +32299,28 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct ExportMemoryWin32HandleInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
- DWORD dwAccess_ = 0 ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
- , dwAccess( dwAccess_ )
- {}
-
- ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>(this) = rhs;
- }
-
- ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
- const void* pNext = nullptr;
- const SECURITY_ATTRIBUTES* pAttributes;
- DWORD dwAccess;
- };
- static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct ExportMemoryWin32HandleInfoNV : public layout::ExportMemoryWin32HandleInfoNV
+ struct ExportMemoryWin32HandleInfoNV
{
- VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
- DWORD dwAccess_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ExportMemoryWin32HandleInfoNV( pAttributes_, dwAccess_ )
+ VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = {},
+ DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pAttributes( pAttributes_ )
+ , dwAccess( dwAccess_ )
{}
+ VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) - offsetof( ExportMemoryWin32HandleInfoNV, pNext ) );
+ return *this;
+ }
+
ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExportMemoryWin32HandleInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExportMemoryWin32HandleInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>(&rhs);
return *this;
}
@@ -34142,54 +32365,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExportMemoryWin32HandleInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
+ const void* pNext = {};
+ const SECURITY_ATTRIBUTES* pAttributes = {};
+ DWORD dwAccess = {};
};
static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- namespace layout
- {
- struct ExportSemaphoreCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( vk::ExternalSemaphoreHandleTypeFlags handleTypes_ = vk::ExternalSemaphoreHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
-
- ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportSemaphoreCreateInfo*>(this) = rhs;
- }
-
- ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportSemaphoreCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
- const void* pNext = nullptr;
- vk::ExternalSemaphoreHandleTypeFlags handleTypes;
- };
- static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ExportSemaphoreCreateInfo : public layout::ExportSemaphoreCreateInfo
+ struct ExportSemaphoreCreateInfo
{
- VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( vk::ExternalSemaphoreHandleTypeFlags handleTypes_ = vk::ExternalSemaphoreHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::ExportSemaphoreCreateInfo( handleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
+ VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) - offsetof( ExportSemaphoreCreateInfo, pNext ) );
+ return *this;
+ }
+
ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExportSemaphoreCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExportSemaphoreCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>(&rhs);
return *this;
}
@@ -34199,7 +32404,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ExportSemaphoreCreateInfo & setHandleTypes( vk::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
@@ -34227,63 +32432,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExportSemaphoreCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
};
static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
+ struct ExportSemaphoreWin32HandleInfoKHR
{
- struct ExportSemaphoreWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
- DWORD dwAccess_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pAttributes( pAttributes_ )
- , dwAccess( dwAccess_ )
- , name( name_ )
- {}
-
- ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>(this) = rhs;
- }
-
- ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
- const void* pNext = nullptr;
- const SECURITY_ATTRIBUTES* pAttributes;
- DWORD dwAccess;
- LPCWSTR name;
- };
- static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ExportSemaphoreWin32HandleInfoKHR : public layout::ExportSemaphoreWin32HandleInfoKHR
- {
- VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr,
- DWORD dwAccess_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ExportSemaphoreWin32HandleInfoKHR( pAttributes_, dwAccess_, name_ )
+ VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {},
+ DWORD dwAccess_ = {},
+ LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pAttributes( pAttributes_ )
+ , dwAccess( dwAccess_ )
+ , name( name_ )
{}
+ VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) - offsetof( ExportSemaphoreWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExportSemaphoreWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExportSemaphoreWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -34335,8 +32517,12 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExportSemaphoreWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
+ const void* pNext = {};
+ const SECURITY_ATTRIBUTES* pAttributes = {};
+ DWORD dwAccess = {};
+ LPCWSTR name = {};
};
static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
@@ -34344,17 +32530,22 @@ namespace VULKAN_HPP_NAMESPACE
struct ExtensionProperties
{
- ExtensionProperties() VULKAN_HPP_NOEXCEPT
- {}
+ ExtensionProperties( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& extensionName_ = {},
+ uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+ : extensionName{}
+ , specVersion( specVersion_ )
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( extensionName, extensionName_ );
+ }
ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExtensionProperties*>(this) = rhs;
+ *this = rhs;
}
ExtensionProperties& operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExtensionProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>(&rhs);
return *this;
}
@@ -34380,25 +32571,30 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- char extensionName[VK_MAX_EXTENSION_NAME_SIZE];
- uint32_t specVersion;
+ char extensionName[VK_MAX_EXTENSION_NAME_SIZE] = {};
+ uint32_t specVersion = {};
};
static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
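
ExtensionProperties now accepts a std::array and copies it into the C-style char member through ConstExpression1DArrayCopy, an element-by-element copy that stays usable in constant expressions where memcpy would not be. A hypothetical equivalent of what such a helper has to do, not the header's actual implementation:

    #include <array>
    #include <cstddef>

    // Hypothetical element-wise copy; the generated header spells this
    // ConstExpression1DArrayCopy<T,N,I> instead.
    template <typename T, std::size_t N>
    constexpr void copyArray( T ( &dst )[N], std::array<T, N> const & src )
    {
      for ( std::size_t i = 0; i < N; ++i )
      {
        dst[i] = src[i];   // element-wise, so a constexpr context accepts it
      }
    }
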
struct ExternalMemoryProperties
{
- ExternalMemoryProperties() VULKAN_HPP_NOEXCEPT
+ ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : externalMemoryFeatures( externalMemoryFeatures_ )
+ , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+ , compatibleHandleTypes( compatibleHandleTypes_ )
{}
ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExternalMemoryProperties*>(this) = rhs;
+ *this = rhs;
}
ExternalMemoryProperties& operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExternalMemoryProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>(&rhs);
return *this;
}
@@ -34425,53 +32621,33 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ExternalMemoryFeatureFlags externalMemoryFeatures;
- vk::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes;
- vk::ExternalMemoryHandleTypeFlags compatibleHandleTypes;
+ VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
};
static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct ExternalBufferProperties
{
- struct ExternalBufferProperties
- {
- protected:
- ExternalBufferProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalBufferProperties*>(this) = rhs;
- }
-
- ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalBufferProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExternalBufferProperties;
- void* pNext = nullptr;
- vk::ExternalMemoryProperties externalMemoryProperties;
- };
- static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct ExternalBufferProperties : public layout::ExternalBufferProperties
- {
- ExternalBufferProperties() VULKAN_HPP_NOEXCEPT
- : layout::ExternalBufferProperties()
+ ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : externalMemoryProperties( externalMemoryProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) - offsetof( ExternalBufferProperties, pNext ) );
+ return *this;
+ }
+
ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalBufferProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExternalBufferProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>(&rhs);
return *this;
}
@@ -34497,54 +32673,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExternalBufferProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
};
static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct ExternalFenceProperties
{
- struct ExternalFenceProperties
- {
- protected:
- ExternalFenceProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalFenceProperties*>(this) = rhs;
- }
-
- ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalFenceProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExternalFenceProperties;
- void* pNext = nullptr;
- vk::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes;
- vk::ExternalFenceHandleTypeFlags compatibleHandleTypes;
- vk::ExternalFenceFeatureFlags externalFenceFeatures;
- };
- static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct ExternalFenceProperties : public layout::ExternalFenceProperties
- {
- ExternalFenceProperties() VULKAN_HPP_NOEXCEPT
- : layout::ExternalFenceProperties()
+ ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+ , compatibleHandleTypes( compatibleHandleTypes_ )
+ , externalFenceFeatures( externalFenceFeatures_ )
{}
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) - offsetof( ExternalFenceProperties, pNext ) );
+ return *this;
+ }
+
ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalFenceProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExternalFenceProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>(&rhs);
return *this;
}
@@ -34572,55 +32732,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExternalFenceProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
};
static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
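
Unlike the CreateInfo structs, the *Properties wrappers carry no set*() members: they are outputs, filled in by a query. A usage sketch, assuming a valid vk::PhysicalDevice named physicalDevice and the Vulkan 1.1 query from this header:

    vk::PhysicalDeviceExternalFenceInfo fenceInfo;
    fenceInfo.handleType = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd;

    vk::ExternalFenceProperties props =
        physicalDevice.getExternalFenceProperties( fenceInfo );
    // props.externalFenceFeatures etc. are read back, never set by hand.
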
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- namespace layout
- {
- struct ExternalFormatANDROID
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = 0 ) VULKAN_HPP_NOEXCEPT
- : externalFormat( externalFormat_ )
- {}
-
- ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalFormatANDROID*>(this) = rhs;
- }
-
- ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalFormatANDROID*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExternalFormatANDROID;
- void* pNext = nullptr;
- uint64_t externalFormat;
- };
- static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "layout struct and wrapper have different size!" );
- }
-
- struct ExternalFormatANDROID : public layout::ExternalFormatANDROID
+ struct ExternalFormatANDROID
{
- VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalFormatANDROID( externalFormat_ )
+ VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT
+ : externalFormat( externalFormat_ )
{}
+ VULKAN_HPP_NAMESPACE::ExternalFormatANDROID & operator=( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) - offsetof( ExternalFormatANDROID, pNext ) );
+ return *this;
+ }
+
ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalFormatANDROID( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExternalFormatANDROID::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>(&rhs);
return *this;
}
@@ -34658,53 +32801,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExternalFormatANDROID::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
+ void* pNext = {};
+ uint64_t externalFormat = {};
};
static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- namespace layout
- {
- struct ExternalImageFormatProperties
- {
- protected:
- ExternalImageFormatProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalImageFormatProperties*>(this) = rhs;
- }
-
- ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalImageFormatProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExternalImageFormatProperties;
- void* pNext = nullptr;
- vk::ExternalMemoryProperties externalMemoryProperties;
- };
- static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct ExternalImageFormatProperties : public layout::ExternalImageFormatProperties
+ struct ExternalImageFormatProperties
{
- ExternalImageFormatProperties() VULKAN_HPP_NOEXCEPT
- : layout::ExternalImageFormatProperties()
+ ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : externalMemoryProperties( externalMemoryProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) - offsetof( ExternalImageFormatProperties, pNext ) );
+ return *this;
+ }
+
ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalImageFormatProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExternalImageFormatProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>(&rhs);
return *this;
}
@@ -34730,25 +32855,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExternalImageFormatProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
};
static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
struct ImageFormatProperties
{
- ImageFormatProperties() VULKAN_HPP_NOEXCEPT
+ ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {},
+ uint32_t maxMipLevels_ = {},
+ uint32_t maxArrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxExtent( maxExtent_ )
+ , maxMipLevels( maxMipLevels_ )
+ , maxArrayLayers( maxArrayLayers_ )
+ , sampleCounts( sampleCounts_ )
+ , maxResourceSize( maxResourceSize_ )
{}
ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageFormatProperties*>(this) = rhs;
+ *this = rhs;
}
ImageFormatProperties& operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageFormatProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>(&rhs);
return *this;
}
@@ -34777,28 +32913,35 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Extent3D maxExtent;
- uint32_t maxMipLevels;
- uint32_t maxArrayLayers;
- vk::SampleCountFlags sampleCounts;
- vk::DeviceSize maxResourceSize;
+ VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
+ uint32_t maxMipLevels = {};
+ uint32_t maxArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
};
static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
struct ExternalImageFormatPropertiesNV
{
- ExternalImageFormatPropertiesNV() VULKAN_HPP_NOEXCEPT
+ ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : imageFormatProperties( imageFormatProperties_ )
+ , externalMemoryFeatures( externalMemoryFeatures_ )
+ , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+ , compatibleHandleTypes( compatibleHandleTypes_ )
{}
ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExternalImageFormatPropertiesNV*>(this) = rhs;
+ *this = rhs;
}
ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkExternalImageFormatPropertiesNV*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>(&rhs);
return *this;
}
@@ -34826,55 +32969,34 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageFormatProperties imageFormatProperties;
- vk::ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
- vk::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
- vk::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
};
static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct ExternalMemoryBufferCreateInfo
{
- struct ExternalMemoryBufferCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
-
- ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>(this) = rhs;
- }
-
- ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlags handleTypes;
- };
- static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ExternalMemoryBufferCreateInfo : public layout::ExternalMemoryBufferCreateInfo
- {
- VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalMemoryBufferCreateInfo( handleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
+ VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) - offsetof( ExternalMemoryBufferCreateInfo, pNext ) );
+ return *this;
+ }
+
ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalMemoryBufferCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExternalMemoryBufferCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>(&rhs);
return *this;
}
@@ -34884,7 +33006,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ExternalMemoryBufferCreateInfo & setHandleTypes( vk::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
@@ -34912,53 +33034,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExternalMemoryBufferCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ExternalMemoryImageCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
-
- ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalMemoryImageCreateInfo*>(this) = rhs;
- }
-
- ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalMemoryImageCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlags handleTypes;
- };
- static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ExternalMemoryImageCreateInfo : public layout::ExternalMemoryImageCreateInfo
+ struct ExternalMemoryImageCreateInfo
{
- VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( vk::ExternalMemoryHandleTypeFlags handleTypes_ = vk::ExternalMemoryHandleTypeFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalMemoryImageCreateInfo( handleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
+ VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) - offsetof( ExternalMemoryImageCreateInfo, pNext ) );
+ return *this;
+ }
+
ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalMemoryImageCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExternalMemoryImageCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>(&rhs);
return *this;
}
@@ -34968,7 +33071,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ExternalMemoryImageCreateInfo & setHandleTypes( vk::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
@@ -34996,53 +33099,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExternalMemoryImageCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
};
static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ExternalMemoryImageCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ = vk::ExternalMemoryHandleTypeFlagsNV() ) VULKAN_HPP_NOEXCEPT
- : handleTypes( handleTypes_ )
- {}
-
- ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>(this) = rhs;
- }
-
- ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlagsNV handleTypes;
- };
- static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct ExternalMemoryImageCreateInfoNV : public layout::ExternalMemoryImageCreateInfoNV
+ struct ExternalMemoryImageCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ = vk::ExternalMemoryHandleTypeFlagsNV() ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalMemoryImageCreateInfoNV( handleTypes_ )
+ VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleTypes( handleTypes_ )
{}
+ VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) - offsetof( ExternalMemoryImageCreateInfoNV, pNext ) );
+ return *this;
+ }
+
ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalMemoryImageCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExternalMemoryImageCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>(&rhs);
return *this;
}
@@ -35052,7 +33136,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ExternalMemoryImageCreateInfoNV & setHandleTypes( vk::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+ ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
{
handleTypes = handleTypes_;
return *this;
@@ -35080,54 +33164,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExternalMemoryImageCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
};
static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct ExternalSemaphoreProperties
{
- struct ExternalSemaphoreProperties
- {
- protected:
- ExternalSemaphoreProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalSemaphoreProperties*>(this) = rhs;
- }
-
- ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkExternalSemaphoreProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eExternalSemaphoreProperties;
- void* pNext = nullptr;
- vk::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes;
- vk::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes;
- vk::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures;
- };
- static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct ExternalSemaphoreProperties : public layout::ExternalSemaphoreProperties
- {
- ExternalSemaphoreProperties() VULKAN_HPP_NOEXCEPT
- : layout::ExternalSemaphoreProperties()
+ ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+ , compatibleHandleTypes( compatibleHandleTypes_ )
+ , externalSemaphoreFeatures( externalSemaphoreFeatures_ )
{}
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties & operator=( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) - offsetof( ExternalSemaphoreProperties, pNext ) );
+ return *this;
+ }
+
ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ExternalSemaphoreProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ExternalSemaphoreProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>(&rhs);
return *this;
}
@@ -35155,53 +33223,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ExternalSemaphoreProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
};
static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
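
  ExternalSemaphoreProperties is a returned-only structure (non-const pNext, no setters), so its new public constructor mostly serves to zero-initialize the output of a capability query. A hedged usage sketch, assuming a vk::PhysicalDevice named physicalDevice:

    vk::PhysicalDeviceExternalSemaphoreInfo semaphoreInfo(
        vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
    vk::ExternalSemaphoreProperties props =
        physicalDevice.getExternalSemaphoreProperties( semaphoreInfo );
    // The handle type is exportable if the corresponding feature bit is set.
    bool exportable = static_cast<bool>(
        props.externalSemaphoreFeatures & vk::ExternalSemaphoreFeatureFlagBits::eExportable );
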
- namespace layout
- {
- struct FenceCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR FenceCreateInfo( vk::FenceCreateFlags flags_ = vk::FenceCreateFlags() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
-
- FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFenceCreateInfo*>(this) = rhs;
- }
-
- FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFenceCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFenceCreateInfo;
- const void* pNext = nullptr;
- vk::FenceCreateFlags flags;
- };
- static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct FenceCreateInfo : public layout::FenceCreateInfo
+ struct FenceCreateInfo
{
- VULKAN_HPP_CONSTEXPR FenceCreateInfo( vk::FenceCreateFlags flags_ = vk::FenceCreateFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::FenceCreateInfo( flags_ )
+ VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
+ VULKAN_HPP_NAMESPACE::FenceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) - offsetof( FenceCreateInfo, pNext ) );
+ return *this;
+ }
+
FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FenceCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FenceCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>(&rhs);
return *this;
}
@@ -35211,7 +33262,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- FenceCreateInfo & setFlags( vk::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -35239,57 +33290,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::FenceCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
};
static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
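
  The `= {}` defaults value-initialize exactly as the old explicit `vk::FenceCreateFlags()` spelling did, and the setter chain is untouched, so both construction styles stay equivalent. A hedged sketch creating a fence that starts signaled, assuming a vk::Device named device:

    vk::FenceCreateInfo createInfo( vk::FenceCreateFlagBits::eSignaled );
    // Equivalent spelling through the chained setter:
    vk::FenceCreateInfo createInfo2 = vk::FenceCreateInfo()
                                          .setFlags( vk::FenceCreateFlagBits::eSignaled );
    vk::UniqueFence fence = device.createFenceUnique( createInfo );
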
- namespace layout
+ struct FenceGetFdInfoKHR
{
- struct FenceGetFdInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( vk::Fence fence_ = vk::Fence(),
- vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
- , handleType( handleType_ )
- {}
-
- FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFenceGetFdInfoKHR*>(this) = rhs;
- }
-
- FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFenceGetFdInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFenceGetFdInfoKHR;
- const void* pNext = nullptr;
- vk::Fence fence;
- vk::ExternalFenceHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct FenceGetFdInfoKHR : public layout::FenceGetFdInfoKHR
- {
- VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( vk::Fence fence_ = vk::Fence(),
- vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::FenceGetFdInfoKHR( fence_, handleType_ )
+ VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : fence( fence_ )
+ , handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) - offsetof( FenceGetFdInfoKHR, pNext ) );
+ return *this;
+ }
+
FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FenceGetFdInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FenceGetFdInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>(&rhs);
return *this;
}
@@ -35299,13 +33329,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- FenceGetFdInfoKHR & setFence( vk::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
- FenceGetFdInfoKHR & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -35326,7 +33356,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fence == rhs.fence )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -35334,59 +33364,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::FenceGetFdInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
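
  Note the operator== hunk: handleType is a scoped enum, so it now compares with plain ==; the old free vk::operator== overload for the FlagBits type is gone. A hedged export sketch, assuming VK_KHR_external_fence_fd is enabled on the device and its entry point is loaded by the dispatcher:

    vk::FenceGetFdInfoKHR getFdInfo( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
    // With exceptions enabled this returns the POSIX fd directly and throws on failure.
    int fd = device.getFenceFdKHR( getFdInfo );
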
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct FenceGetWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( vk::Fence fence_ = vk::Fence(),
- vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
- , handleType( handleType_ )
- {}
-
- FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>(this) = rhs;
- }
-
- FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
- const void* pNext = nullptr;
- vk::Fence fence;
- vk::ExternalFenceHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct FenceGetWin32HandleInfoKHR : public layout::FenceGetWin32HandleInfoKHR
+ struct FenceGetWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( vk::Fence fence_ = vk::Fence(),
- vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::FenceGetWin32HandleInfoKHR( fence_, handleType_ )
+ VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : fence( fence_ )
+ , handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) - offsetof( FenceGetWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FenceGetWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FenceGetWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -35396,13 +33406,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- FenceGetWin32HandleInfoKHR & setFence( vk::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
- FenceGetWin32HandleInfoKHR & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -35423,7 +33433,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fence == rhs.fence )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -35431,54 +33441,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::FenceGetWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- namespace layout
+ struct FilterCubicImageViewImageFormatPropertiesEXT
{
- struct FilterCubicImageViewImageFormatPropertiesEXT
- {
- protected:
- FilterCubicImageViewImageFormatPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>(this) = rhs;
- }
-
- FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
- void* pNext = nullptr;
- vk::Bool32 filterCubic;
- vk::Bool32 filterCubicMinmax;
- };
- static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct FilterCubicImageViewImageFormatPropertiesEXT : public layout::FilterCubicImageViewImageFormatPropertiesEXT
- {
- FilterCubicImageViewImageFormatPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::FilterCubicImageViewImageFormatPropertiesEXT()
+ FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT
+ : filterCubic( filterCubic_ )
+ , filterCubicMinmax( filterCubicMinmax_ )
{}
+ VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) - offsetof( FilterCubicImageViewImageFormatPropertiesEXT, pNext ) );
+ return *this;
+ }
+
FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FilterCubicImageViewImageFormatPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FilterCubicImageViewImageFormatPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>(&rhs);
return *this;
}
@@ -35505,25 +33499,33 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::FilterCubicImageViewImageFormatPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
};
static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
struct FormatProperties
{
- FormatProperties() VULKAN_HPP_NOEXCEPT
+ FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ : linearTilingFeatures( linearTilingFeatures_ )
+ , optimalTilingFeatures( optimalTilingFeatures_ )
+ , bufferFeatures( bufferFeatures_ )
{}
FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkFormatProperties*>(this) = rhs;
+ *this = rhs;
}
FormatProperties& operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkFormatProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>(&rhs);
return *this;
}
@@ -35550,53 +33552,33 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::FormatFeatureFlags linearTilingFeatures;
- vk::FormatFeatureFlags optimalTilingFeatures;
- vk::FormatFeatureFlags bufferFeatures;
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
+ VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
};
static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
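
  FormatProperties carries no sType/pNext pair, which is why its Vk-assignment above reinterprets the whole struct rather than memcpy-ing from pNext like the extensible wrappers. A hedged query sketch:

    vk::FormatProperties formatProps =
        physicalDevice.getFormatProperties( vk::Format::eR8G8B8A8Unorm );
    bool linearFilterable = static_cast<bool>(
        formatProps.optimalTilingFeatures &
        vk::FormatFeatureFlagBits::eSampledImageFilterLinear );
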
- namespace layout
- {
- struct FormatProperties2
- {
- protected:
- FormatProperties2() VULKAN_HPP_NOEXCEPT
- {}
-
- FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFormatProperties2*>(this) = rhs;
- }
-
- FormatProperties2& operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFormatProperties2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFormatProperties2;
- void* pNext = nullptr;
- vk::FormatProperties formatProperties;
- };
- static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "layout struct and wrapper have different size!" );
- }
-
- struct FormatProperties2 : public layout::FormatProperties2
+ struct FormatProperties2
{
- FormatProperties2() VULKAN_HPP_NOEXCEPT
- : layout::FormatProperties2()
+ FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : formatProperties( formatProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::FormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) - offsetof( FormatProperties2, pNext ) );
+ return *this;
+ }
+
FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FormatProperties2( rhs )
- {}
+ {
+ *this = rhs;
+ }
FormatProperties2& operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FormatProperties2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>(&rhs);
return *this;
}
@@ -35622,139 +33604,108 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::FormatProperties2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
};
static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!" );
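
  FormatProperties2 exists so the same query becomes pNext-extensible; the plain struct is embedded as its formatProperties member. A hedged sketch of the 2-variant query:

    vk::FormatProperties2 formatProps2 =
        physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm );
    vk::FormatFeatureFlags optimal = formatProps2.formatProperties.optimalTilingFeatures;
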
- namespace layout
+ struct FramebufferAttachmentImageInfo
{
- struct FramebufferAttachmentImageInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfoKHR( vk::ImageCreateFlags flags_ = vk::ImageCreateFlags(),
- vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(),
- uint32_t width_ = 0,
- uint32_t height_ = 0,
- uint32_t layerCount_ = 0,
- uint32_t viewFormatCount_ = 0,
- const vk::Format* pViewFormats_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , usage( usage_ )
- , width( width_ )
- , height( height_ )
- , layerCount( layerCount_ )
- , viewFormatCount( viewFormatCount_ )
- , pViewFormats( pViewFormats_ )
- {}
-
- FramebufferAttachmentImageInfoKHR( VkFramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFramebufferAttachmentImageInfoKHR*>(this) = rhs;
- }
-
- FramebufferAttachmentImageInfoKHR& operator=( VkFramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFramebufferAttachmentImageInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFramebufferAttachmentImageInfoKHR;
- const void* pNext = nullptr;
- vk::ImageCreateFlags flags;
- vk::ImageUsageFlags usage;
- uint32_t width;
- uint32_t height;
- uint32_t layerCount;
- uint32_t viewFormatCount;
- const vk::Format* pViewFormats;
- };
- static_assert( sizeof( FramebufferAttachmentImageInfoKHR ) == sizeof( VkFramebufferAttachmentImageInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct FramebufferAttachmentImageInfoKHR : public layout::FramebufferAttachmentImageInfoKHR
- {
- VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfoKHR( vk::ImageCreateFlags flags_ = vk::ImageCreateFlags(),
- vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(),
- uint32_t width_ = 0,
- uint32_t height_ = 0,
- uint32_t layerCount_ = 0,
- uint32_t viewFormatCount_ = 0,
- const vk::Format* pViewFormats_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::FramebufferAttachmentImageInfoKHR( flags_, usage_, width_, height_, layerCount_, viewFormatCount_, pViewFormats_ )
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+ uint32_t width_ = {},
+ uint32_t height_ = {},
+ uint32_t layerCount_ = {},
+ uint32_t viewFormatCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , usage( usage_ )
+ , width( width_ )
+ , height( height_ )
+ , layerCount( layerCount_ )
+ , viewFormatCount( viewFormatCount_ )
+ , pViewFormats( pViewFormats_ )
{}
- FramebufferAttachmentImageInfoKHR( VkFramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FramebufferAttachmentImageInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) - offsetof( FramebufferAttachmentImageInfo, pNext ) );
+ return *this;
+ }
- FramebufferAttachmentImageInfoKHR& operator=( VkFramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FramebufferAttachmentImageInfoKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ FramebufferAttachmentImageInfo& operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>(&rhs);
return *this;
}
- FramebufferAttachmentImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- FramebufferAttachmentImageInfoKHR & setFlags( vk::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- FramebufferAttachmentImageInfoKHR & setUsage( vk::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- FramebufferAttachmentImageInfoKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
{
width = width_;
return *this;
}
- FramebufferAttachmentImageInfoKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
{
height = height_;
return *this;
}
- FramebufferAttachmentImageInfoKHR & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
{
layerCount = layerCount_;
return *this;
}
- FramebufferAttachmentImageInfoKHR & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = viewFormatCount_;
return *this;
}
- FramebufferAttachmentImageInfoKHR & setPViewFormats( const vk::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
{
pViewFormats = pViewFormats_;
return *this;
}
- operator VkFramebufferAttachmentImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFramebufferAttachmentImageInfoKHR*>( this );
+ return *reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
}
- operator VkFramebufferAttachmentImageInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFramebufferAttachmentImageInfoKHR*>( this );
+ return *reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
}
- bool operator==( FramebufferAttachmentImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -35767,94 +33718,79 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pViewFormats == rhs.pViewFormats );
}
- bool operator!=( FramebufferAttachmentImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::FramebufferAttachmentImageInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ uint32_t width = {};
+ uint32_t height = {};
+ uint32_t layerCount = {};
+ uint32_t viewFormatCount = {};
+ const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
};
- static_assert( sizeof( FramebufferAttachmentImageInfoKHR ) == sizeof( VkFramebufferAttachmentImageInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FramebufferAttachmentImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct FramebufferAttachmentsCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfoKHR( uint32_t attachmentImageInfoCount_ = 0,
- const vk::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : attachmentImageInfoCount( attachmentImageInfoCount_ )
- , pAttachmentImageInfos( pAttachmentImageInfos_ )
- {}
-
- FramebufferAttachmentsCreateInfoKHR( VkFramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR*>(this) = rhs;
- }
-
- FramebufferAttachmentsCreateInfoKHR& operator=( VkFramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfoKHR;
- const void* pNext = nullptr;
- uint32_t attachmentImageInfoCount;
- const vk::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos;
- };
- static_assert( sizeof( FramebufferAttachmentsCreateInfoKHR ) == sizeof( VkFramebufferAttachmentsCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
- struct FramebufferAttachmentsCreateInfoKHR : public layout::FramebufferAttachmentsCreateInfoKHR
+ struct FramebufferAttachmentsCreateInfo
{
- VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfoKHR( uint32_t attachmentImageInfoCount_ = 0,
- const vk::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::FramebufferAttachmentsCreateInfoKHR( attachmentImageInfoCount_, pAttachmentImageInfos_ )
+ VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {},
+ const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT
+ : attachmentImageInfoCount( attachmentImageInfoCount_ )
+ , pAttachmentImageInfos( pAttachmentImageInfos_ )
{}
- FramebufferAttachmentsCreateInfoKHR( VkFramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FramebufferAttachmentsCreateInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) - offsetof( FramebufferAttachmentsCreateInfo, pNext ) );
+ return *this;
+ }
+
+ FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- FramebufferAttachmentsCreateInfoKHR& operator=( VkFramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentsCreateInfo& operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FramebufferAttachmentsCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>(&rhs);
return *this;
}
- FramebufferAttachmentsCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- FramebufferAttachmentsCreateInfoKHR & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentImageInfoCount = attachmentImageInfoCount_;
return *this;
}
- FramebufferAttachmentsCreateInfoKHR & setPAttachmentImageInfos( const vk::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
{
pAttachmentImageInfos = pAttachmentImageInfos_;
return *this;
}
- operator VkFramebufferAttachmentsCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfoKHR*>( this );
+ return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
}
- operator VkFramebufferAttachmentsCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR*>( this );
+ return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
}
- bool operator==( FramebufferAttachmentsCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -35862,82 +33798,52 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
}
- bool operator!=( FramebufferAttachmentsCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::FramebufferAttachmentsCreateInfoKHR::sType;
- };
- static_assert( sizeof( FramebufferAttachmentsCreateInfoKHR ) == sizeof( VkFramebufferAttachmentsCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
+ const void* pNext = {};
+ uint32_t attachmentImageInfoCount = {};
+ const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {};
+ };
+ static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
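
  These two hunks also drop the KHR suffix: VK_KHR_imageless_framebuffer was promoted to core in Vulkan 1.2, so FramebufferAttachmentImageInfoKHR and FramebufferAttachmentsCreateInfoKHR become their unsuffixed core spellings. A hedged sketch describing one color attachment for an imageless framebuffer:

    vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
    vk::FramebufferAttachmentImageInfo attachmentImageInfo(
        {},                                        // flags
        vk::ImageUsageFlagBits::eColorAttachment,  // usage
        1280, 720, 1,                              // width, height, layerCount
        1, &colorFormat );                         // viewFormatCount, pViewFormats
    vk::FramebufferAttachmentsCreateInfo attachmentsInfo( 1, &attachmentImageInfo );
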
+ struct FramebufferCreateInfo
+ {
+ VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+ uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {},
+ uint32_t width_ = {},
+ uint32_t height_ = {},
+ uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , renderPass( renderPass_ )
+ , attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
+ , width( width_ )
+ , height( height_ )
+ , layers( layers_ )
+ {}
- namespace layout
- {
- struct FramebufferCreateInfo
+ VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & operator=( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( vk::FramebufferCreateFlags flags_ = vk::FramebufferCreateFlags(),
- vk::RenderPass renderPass_ = vk::RenderPass(),
- uint32_t attachmentCount_ = 0,
- const vk::ImageView* pAttachments_ = nullptr,
- uint32_t width_ = 0,
- uint32_t height_ = 0,
- uint32_t layers_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , renderPass( renderPass_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , width( width_ )
- , height( height_ )
- , layers( layers_ )
- {}
-
- FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFramebufferCreateInfo*>(this) = rhs;
- }
-
- FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFramebufferCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFramebufferCreateInfo;
- const void* pNext = nullptr;
- vk::FramebufferCreateFlags flags;
- vk::RenderPass renderPass;
- uint32_t attachmentCount;
- const vk::ImageView* pAttachments;
- uint32_t width;
- uint32_t height;
- uint32_t layers;
- };
- static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct FramebufferCreateInfo : public layout::FramebufferCreateInfo
- {
- VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( vk::FramebufferCreateFlags flags_ = vk::FramebufferCreateFlags(),
- vk::RenderPass renderPass_ = vk::RenderPass(),
- uint32_t attachmentCount_ = 0,
- const vk::ImageView* pAttachments_ = nullptr,
- uint32_t width_ = 0,
- uint32_t height_ = 0,
- uint32_t layers_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::FramebufferCreateInfo( flags_, renderPass_, attachmentCount_, pAttachments_, width_, height_, layers_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) - offsetof( FramebufferCreateInfo, pNext ) );
+ return *this;
+ }
FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FramebufferCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FramebufferCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>(&rhs);
return *this;
}
@@ -35947,13 +33853,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- FramebufferCreateInfo & setFlags( vk::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- FramebufferCreateInfo & setRenderPass( vk::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
@@ -35965,7 +33871,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- FramebufferCreateInfo & setPAttachments( const vk::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
@@ -36017,55 +33923,46 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::FramebufferCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
+ uint32_t width = {};
+ uint32_t height = {};
+ uint32_t layers = {};
};
static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
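
  Continuing the sketch above: an imageless framebuffer passes null pAttachments and chains the attachments info instead (the eImageless flag spelling is assumed from the same 1.2 promotion, and renderPass is assumed to be a compatible vk::RenderPass):

    vk::FramebufferCreateInfo framebufferInfo(
        vk::FramebufferCreateFlagBits::eImageless, renderPass,
        1, nullptr,        // attachmentCount matches the render pass; views come at begin time
        1280, 720, 1 );    // width, height, layers
    framebufferInfo.pNext = &attachmentsInfo;
    vk::UniqueFramebuffer framebuffer = device.createFramebufferUnique( framebufferInfo );
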
- namespace layout
- {
- struct FramebufferMixedSamplesCombinationNV
- {
- protected:
- FramebufferMixedSamplesCombinationNV() VULKAN_HPP_NOEXCEPT
- {}
-
- FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>(this) = rhs;
- }
-
- FramebufferMixedSamplesCombinationNV& operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
- void* pNext = nullptr;
- vk::CoverageReductionModeNV coverageReductionMode;
- vk::SampleCountFlagBits rasterizationSamples;
- vk::SampleCountFlags depthStencilSamples;
- vk::SampleCountFlags colorSamples;
- };
- static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "layout struct and wrapper have different size!" );
- }
-
- struct FramebufferMixedSamplesCombinationNV : public layout::FramebufferMixedSamplesCombinationNV
+ struct FramebufferMixedSamplesCombinationNV
{
- FramebufferMixedSamplesCombinationNV() VULKAN_HPP_NOEXCEPT
- : layout::FramebufferMixedSamplesCombinationNV()
+ FramebufferMixedSamplesCombinationNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT
+ : coverageReductionMode( coverageReductionMode_ )
+ , rasterizationSamples( rasterizationSamples_ )
+ , depthStencilSamples( depthStencilSamples_ )
+ , colorSamples( colorSamples_ )
{}
+ VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV & operator=( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) - offsetof( FramebufferMixedSamplesCombinationNV, pNext ) );
+ return *this;
+ }
+
FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::FramebufferMixedSamplesCombinationNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
FramebufferMixedSamplesCombinationNV& operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::FramebufferMixedSamplesCombinationNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>(&rhs);
return *this;
}
@@ -36084,7 +33981,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( coverageReductionMode == rhs.coverageReductionMode )
- && vk::operator==( rasterizationSamples, rhs.rasterizationSamples )
+ && ( rasterizationSamples == rhs.rasterizationSamples )
&& ( depthStencilSamples == rhs.depthStencilSamples )
&& ( colorSamples == rhs.colorSamples );
}
@@ -36094,17 +33991,22 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::FramebufferMixedSamplesCombinationNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
};
static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
struct VertexInputBindingDescription
{
- VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = 0,
- uint32_t stride_ = 0,
- vk::VertexInputRate inputRate_ = vk::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = {},
+ uint32_t stride_ = {},
+ VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
: binding( binding_ )
, stride( stride_ )
, inputRate( inputRate_ )
@@ -36112,12 +34014,12 @@ namespace VULKAN_HPP_NAMESPACE
VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkVertexInputBindingDescription*>(this) = rhs;
+ *this = rhs;
}
VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkVertexInputBindingDescription*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>(&rhs);
return *this;
}
@@ -36133,7 +34035,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VertexInputBindingDescription & setInputRate( vk::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+ VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
{
inputRate = inputRate_;
return *this;
@@ -36162,19 +34064,19 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t binding;
- uint32_t stride;
- vk::VertexInputRate inputRate;
+ uint32_t binding = {};
+ uint32_t stride = {};
+ VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
};
static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
struct VertexInputAttributeDescription
{
- VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = 0,
- uint32_t binding_ = 0,
- vk::Format format_ = vk::Format::eUndefined,
- uint32_t offset_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {},
+ uint32_t binding_ = {},
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
: location( location_ )
, binding( binding_ )
, format( format_ )
@@ -36183,12 +34085,12 @@ namespace VULKAN_HPP_NAMESPACE
VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkVertexInputAttributeDescription*>(this) = rhs;
+ *this = rhs;
}
VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkVertexInputAttributeDescription*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>(&rhs);
return *this;
}
@@ -36204,7 +34106,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- VertexInputAttributeDescription & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
@@ -36240,71 +34142,42 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t location;
- uint32_t binding;
- vk::Format format;
- uint32_t offset;
+ uint32_t location = {};
+ uint32_t binding = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ uint32_t offset = {};
};
static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
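
  VertexInputBindingDescription and VertexInputAttributeDescription are plain structs with no pNext, so they only pick up the new default-argument and namespace spellings here. A hedged sketch describing one tightly packed binding of { vec2 position; vec3 color; }:

    vk::VertexInputBindingDescription binding(
        0, 5 * sizeof( float ), vk::VertexInputRate::eVertex );
    vk::VertexInputAttributeDescription attributes[2] = {
        vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32Sfloat,    0 ),
        vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32B32Sfloat,
                                             2 * sizeof( float ) )
    };
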
- namespace layout
- {
- struct PipelineVertexInputStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( vk::PipelineVertexInputStateCreateFlags flags_ = vk::PipelineVertexInputStateCreateFlags(),
- uint32_t vertexBindingDescriptionCount_ = 0,
- const vk::VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr,
- uint32_t vertexAttributeDescriptionCount_ = 0,
- const vk::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
- , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
- , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
- , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
- {}
-
- PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>(this) = rhs;
- }
-
- PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineVertexInputStateCreateFlags flags;
- uint32_t vertexBindingDescriptionCount;
- const vk::VertexInputBindingDescription* pVertexBindingDescriptions;
- uint32_t vertexAttributeDescriptionCount;
- const vk::VertexInputAttributeDescription* pVertexAttributeDescriptions;
- };
- static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineVertexInputStateCreateInfo : public layout::PipelineVertexInputStateCreateInfo
+ struct PipelineVertexInputStateCreateInfo
{
- VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( vk::PipelineVertexInputStateCreateFlags flags_ = vk::PipelineVertexInputStateCreateFlags(),
- uint32_t vertexBindingDescriptionCount_ = 0,
- const vk::VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr,
- uint32_t vertexAttributeDescriptionCount_ = 0,
- const vk::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineVertexInputStateCreateInfo( flags_, vertexBindingDescriptionCount_, pVertexBindingDescriptions_, vertexAttributeDescriptionCount_, pVertexAttributeDescriptions_ )
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {},
+ uint32_t vertexBindingDescriptionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {},
+ uint32_t vertexAttributeDescriptionCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
+ , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
+ , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
+ , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) - offsetof( PipelineVertexInputStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineVertexInputStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineVertexInputStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>(&rhs);
return *this;
}
@@ -36314,7 +34187,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineVertexInputStateCreateInfo & setFlags( vk::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -36326,7 +34199,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const vk::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
pVertexBindingDescriptions = pVertexBindingDescriptions_;
return *this;
@@ -36338,7 +34211,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const vk::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
{
pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
return *this;
@@ -36370,61 +34243,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineVertexInputStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
+ uint32_t vertexBindingDescriptionCount = {};
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {};
+ uint32_t vertexAttributeDescriptionCount = {};
+ const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {};
};
static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
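
  A hedged sketch wiring the binding and attributes from above into the vertex input state:

    vk::PipelineVertexInputStateCreateInfo vertexInputState(
        {},              // flags
        1, &binding,     // vertexBindingDescriptionCount, pVertexBindingDescriptions
        2, attributes ); // vertexAttributeDescriptionCount, pVertexAttributeDescriptions
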
- namespace layout
+ struct PipelineInputAssemblyStateCreateInfo
{
- struct PipelineInputAssemblyStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( vk::PipelineInputAssemblyStateCreateFlags flags_ = vk::PipelineInputAssemblyStateCreateFlags(),
- vk::PrimitiveTopology topology_ = vk::PrimitiveTopology::ePointList,
- vk::Bool32 primitiveRestartEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , topology( topology_ )
- , primitiveRestartEnable( primitiveRestartEnable_ )
- {}
-
- PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>(this) = rhs;
- }
-
- PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineInputAssemblyStateCreateFlags flags;
- vk::PrimitiveTopology topology;
- vk::Bool32 primitiveRestartEnable;
- };
- static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineInputAssemblyStateCreateInfo : public layout::PipelineInputAssemblyStateCreateInfo
- {
- VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( vk::PipelineInputAssemblyStateCreateFlags flags_ = vk::PipelineInputAssemblyStateCreateFlags(),
- vk::PrimitiveTopology topology_ = vk::PrimitiveTopology::ePointList,
- vk::Bool32 primitiveRestartEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineInputAssemblyStateCreateInfo( flags_, topology_, primitiveRestartEnable_ )
+ VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList,
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , topology( topology_ )
+ , primitiveRestartEnable( primitiveRestartEnable_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) - offsetof( PipelineInputAssemblyStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineInputAssemblyStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineInputAssemblyStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>(&rhs);
return *this;
}
@@ -36434,19 +34288,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineInputAssemblyStateCreateInfo & setFlags( vk::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineInputAssemblyStateCreateInfo & setTopology( vk::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
{
topology = topology_;
return *this;
}
- PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( vk::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
{
primitiveRestartEnable = primitiveRestartEnable_;
return *this;
@@ -36476,57 +34330,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineInputAssemblyStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
};
static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
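
The copy idiom that replaces the layout:: base classes above deserves a note. With the flattened structs, sType becomes a const member, so the new operator= overloads copy everything from pNext onward and deliberately skip it. A minimal sketch of the pattern, not part of the patch:

    // Sketch of the memcpy-from-pNext idiom: sType is a const structure tag
    // that must never change, so assignment copies only the tail.
    #include <cstddef>   // offsetof
    #include <cstring>   // memcpy

    struct Wrapper
    {
      const int   sType   = 7;        // fixed tag, excluded from the copy
      const void* pNext   = nullptr;  // first copyable member
      int         payload = 0;

      Wrapper & operator=( Wrapper const & rhs ) noexcept
      {
        // Valid because the struct is standard layout, which the
        // static_asserts in this header verify for every wrapper.
        std::memcpy( &pNext, &rhs.pNext, sizeof( Wrapper ) - offsetof( Wrapper, pNext ) );
        return *this;
      }
    };
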
- namespace layout
+ struct PipelineTessellationStateCreateInfo
{
- struct PipelineTessellationStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( vk::PipelineTessellationStateCreateFlags flags_ = vk::PipelineTessellationStateCreateFlags(),
- uint32_t patchControlPoints_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , patchControlPoints( patchControlPoints_ )
- {}
-
- PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>(this) = rhs;
- }
-
- PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineTessellationStateCreateFlags flags;
- uint32_t patchControlPoints;
- };
- static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineTessellationStateCreateInfo : public layout::PipelineTessellationStateCreateInfo
- {
- VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( vk::PipelineTessellationStateCreateFlags flags_ = vk::PipelineTessellationStateCreateFlags(),
- uint32_t patchControlPoints_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineTessellationStateCreateInfo( flags_, patchControlPoints_ )
+ VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {},
+ uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , patchControlPoints( patchControlPoints_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) - offsetof( PipelineTessellationStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineTessellationStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineTessellationStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>(&rhs);
return *this;
}
@@ -36536,7 +34371,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineTessellationStateCreateInfo & setFlags( vk::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -36571,20 +34406,23 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineTessellationStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
+ uint32_t patchControlPoints = {};
};
static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
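
The other change running through every hunk here is mechanical but worth naming: hard-coded vk:: qualifiers become VULKAN_HPP_NAMESPACE, the macro this header already uses to name its namespace. The macro defaults to vk but can be overridden before inclusion, so fully qualified generated code has to spell it out (the override name below is just an example):

    // Overriding the wrapper namespace; types then live in ::my_vk.
    #define VULKAN_HPP_NAMESPACE my_vk
    #include <vulkan/vulkan.hpp>
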
struct Viewport
{
- VULKAN_HPP_CONSTEXPR Viewport( float x_ = 0,
- float y_ = 0,
- float width_ = 0,
- float height_ = 0,
- float minDepth_ = 0,
- float maxDepth_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR Viewport( float x_ = {},
+ float y_ = {},
+ float width_ = {},
+ float height_ = {},
+ float minDepth_ = {},
+ float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
, width( width_ )
@@ -36595,12 +34433,12 @@ namespace VULKAN_HPP_NAMESPACE
Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkViewport*>(this) = rhs;
+ *this = rhs;
}
Viewport& operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkViewport*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>(&rhs);
return *this;
}
@@ -36666,73 +34504,44 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- float x;
- float y;
- float width;
- float height;
- float minDepth;
- float maxDepth;
+ float x = {};
+ float y = {};
+ float width = {};
+ float height = {};
+ float minDepth = {};
+ float maxDepth = {};
};
static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineViewportStateCreateInfo
{
- struct PipelineViewportStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( vk::PipelineViewportStateCreateFlags flags_ = vk::PipelineViewportStateCreateFlags(),
- uint32_t viewportCount_ = 0,
- const vk::Viewport* pViewports_ = nullptr,
- uint32_t scissorCount_ = 0,
- const vk::Rect2D* pScissors_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , viewportCount( viewportCount_ )
- , pViewports( pViewports_ )
- , scissorCount( scissorCount_ )
- , pScissors( pScissors_ )
- {}
-
- PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportStateCreateInfo*>(this) = rhs;
- }
-
- PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineViewportStateCreateFlags flags;
- uint32_t viewportCount;
- const vk::Viewport* pViewports;
- uint32_t scissorCount;
- const vk::Rect2D* pScissors;
- };
- static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineViewportStateCreateInfo : public layout::PipelineViewportStateCreateInfo
- {
- VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( vk::PipelineViewportStateCreateFlags flags_ = vk::PipelineViewportStateCreateFlags(),
- uint32_t viewportCount_ = 0,
- const vk::Viewport* pViewports_ = nullptr,
- uint32_t scissorCount_ = 0,
- const vk::Rect2D* pScissors_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportStateCreateInfo( flags_, viewportCount_, pViewports_, scissorCount_, pScissors_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {},
+ uint32_t viewportCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {},
+ uint32_t scissorCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , viewportCount( viewportCount_ )
+ , pViewports( pViewports_ )
+ , scissorCount( scissorCount_ )
+ , pScissors( pScissors_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) - offsetof( PipelineViewportStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineViewportStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>(&rhs);
return *this;
}
@@ -36742,7 +34551,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportStateCreateInfo & setFlags( vk::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -36754,7 +34563,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportStateCreateInfo & setPViewports( const vk::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT
{
pViewports = pViewports_;
return *this;
@@ -36766,7 +34575,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportStateCreateInfo & setPScissors( const vk::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT
{
pScissors = pScissors_;
return *this;
@@ -36798,93 +34607,58 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineViewportStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
+ uint32_t viewportCount = {};
+ const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {};
+ uint32_t scissorCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {};
};
static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
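
As a usage note, the flattened structs keep the fluent setters shown in the hunks above, so client code like the following sketch (illustrative, not taken from this patch) builds the same way against the old and new header:

    vk::Viewport viewport( 0.0f, 0.0f, 800.0f, 600.0f, 0.0f, 1.0f );
    vk::Rect2D   scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 800, 600 ) );

    // Each setter returns *this, so the struct can be configured inline.
    vk::PipelineViewportStateCreateInfo viewportState = vk::PipelineViewportStateCreateInfo()
        .setViewportCount( 1 )
        .setPViewports( &viewport )
        .setScissorCount( 1 )
        .setPScissors( &scissor );
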
- namespace layout
- {
- struct PipelineRasterizationStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( vk::PipelineRasterizationStateCreateFlags flags_ = vk::PipelineRasterizationStateCreateFlags(),
- vk::Bool32 depthClampEnable_ = 0,
- vk::Bool32 rasterizerDiscardEnable_ = 0,
- vk::PolygonMode polygonMode_ = vk::PolygonMode::eFill,
- vk::CullModeFlags cullMode_ = vk::CullModeFlags(),
- vk::FrontFace frontFace_ = vk::FrontFace::eCounterClockwise,
- vk::Bool32 depthBiasEnable_ = 0,
- float depthBiasConstantFactor_ = 0,
- float depthBiasClamp_ = 0,
- float depthBiasSlopeFactor_ = 0,
- float lineWidth_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , depthClampEnable( depthClampEnable_ )
- , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
- , polygonMode( polygonMode_ )
- , cullMode( cullMode_ )
- , frontFace( frontFace_ )
- , depthBiasEnable( depthBiasEnable_ )
- , depthBiasConstantFactor( depthBiasConstantFactor_ )
- , depthBiasClamp( depthBiasClamp_ )
- , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
- , lineWidth( lineWidth_ )
- {}
-
- PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>(this) = rhs;
- }
-
- PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineRasterizationStateCreateFlags flags;
- vk::Bool32 depthClampEnable;
- vk::Bool32 rasterizerDiscardEnable;
- vk::PolygonMode polygonMode;
- vk::CullModeFlags cullMode;
- vk::FrontFace frontFace;
- vk::Bool32 depthBiasEnable;
- float depthBiasConstantFactor;
- float depthBiasClamp;
- float depthBiasSlopeFactor;
- float lineWidth;
- };
- static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineRasterizationStateCreateInfo : public layout::PipelineRasterizationStateCreateInfo
- {
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( vk::PipelineRasterizationStateCreateFlags flags_ = vk::PipelineRasterizationStateCreateFlags(),
- vk::Bool32 depthClampEnable_ = 0,
- vk::Bool32 rasterizerDiscardEnable_ = 0,
- vk::PolygonMode polygonMode_ = vk::PolygonMode::eFill,
- vk::CullModeFlags cullMode_ = vk::CullModeFlags(),
- vk::FrontFace frontFace_ = vk::FrontFace::eCounterClockwise,
- vk::Bool32 depthBiasEnable_ = 0,
- float depthBiasConstantFactor_ = 0,
- float depthBiasClamp_ = 0,
- float depthBiasSlopeFactor_ = 0,
- float lineWidth_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationStateCreateInfo( flags_, depthClampEnable_, rasterizerDiscardEnable_, polygonMode_, cullMode_, frontFace_, depthBiasEnable_, depthBiasConstantFactor_, depthBiasClamp_, depthBiasSlopeFactor_, lineWidth_ )
+ struct PipelineRasterizationStateCreateInfo
+ {
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {},
+ VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill,
+ VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {},
+ VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise,
+ VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {},
+ float depthBiasConstantFactor_ = {},
+ float depthBiasClamp_ = {},
+ float depthBiasSlopeFactor_ = {},
+ float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , depthClampEnable( depthClampEnable_ )
+ , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
+ , polygonMode( polygonMode_ )
+ , cullMode( cullMode_ )
+ , frontFace( frontFace_ )
+ , depthBiasEnable( depthBiasEnable_ )
+ , depthBiasConstantFactor( depthBiasConstantFactor_ )
+ , depthBiasClamp( depthBiasClamp_ )
+ , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
+ , lineWidth( lineWidth_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) - offsetof( PipelineRasterizationStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineRasterizationStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>(&rhs);
return *this;
}
@@ -36894,43 +34668,43 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineRasterizationStateCreateInfo & setFlags( vk::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setDepthClampEnable( vk::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthClampEnable = depthClampEnable_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( vk::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
{
rasterizerDiscardEnable = rasterizerDiscardEnable_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setPolygonMode( vk::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
{
polygonMode = polygonMode_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setCullMode( vk::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
{
cullMode = cullMode_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setFrontFace( vk::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
{
frontFace = frontFace_;
return *this;
}
- PipelineRasterizationStateCreateInfo & setDepthBiasEnable( vk::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthBiasEnable = depthBiasEnable_;
return *this;
@@ -36992,77 +34766,56 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineRasterizationStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
+ VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
+ VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
+ VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
+ VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
+ float depthBiasConstantFactor = {};
+ float depthBiasClamp = {};
+ float depthBiasSlopeFactor = {};
+ float lineWidth = {};
};
static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineMultisampleStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( vk::PipelineMultisampleStateCreateFlags flags_ = vk::PipelineMultisampleStateCreateFlags(),
- vk::SampleCountFlagBits rasterizationSamples_ = vk::SampleCountFlagBits::e1,
- vk::Bool32 sampleShadingEnable_ = 0,
- float minSampleShading_ = 0,
- const vk::SampleMask* pSampleMask_ = nullptr,
- vk::Bool32 alphaToCoverageEnable_ = 0,
- vk::Bool32 alphaToOneEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , rasterizationSamples( rasterizationSamples_ )
- , sampleShadingEnable( sampleShadingEnable_ )
- , minSampleShading( minSampleShading_ )
- , pSampleMask( pSampleMask_ )
- , alphaToCoverageEnable( alphaToCoverageEnable_ )
- , alphaToOneEnable( alphaToOneEnable_ )
- {}
-
- PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>(this) = rhs;
- }
-
- PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineMultisampleStateCreateFlags flags;
- vk::SampleCountFlagBits rasterizationSamples;
- vk::Bool32 sampleShadingEnable;
- float minSampleShading;
- const vk::SampleMask* pSampleMask;
- vk::Bool32 alphaToCoverageEnable;
- vk::Bool32 alphaToOneEnable;
- };
- static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineMultisampleStateCreateInfo : public layout::PipelineMultisampleStateCreateInfo
+ struct PipelineMultisampleStateCreateInfo
{
- VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( vk::PipelineMultisampleStateCreateFlags flags_ = vk::PipelineMultisampleStateCreateFlags(),
- vk::SampleCountFlagBits rasterizationSamples_ = vk::SampleCountFlagBits::e1,
- vk::Bool32 sampleShadingEnable_ = 0,
- float minSampleShading_ = 0,
- const vk::SampleMask* pSampleMask_ = nullptr,
- vk::Bool32 alphaToCoverageEnable_ = 0,
- vk::Bool32 alphaToOneEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineMultisampleStateCreateInfo( flags_, rasterizationSamples_, sampleShadingEnable_, minSampleShading_, pSampleMask_, alphaToCoverageEnable_, alphaToOneEnable_ )
+ VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {},
+ float minSampleShading_ = {},
+ const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , rasterizationSamples( rasterizationSamples_ )
+ , sampleShadingEnable( sampleShadingEnable_ )
+ , minSampleShading( minSampleShading_ )
+ , pSampleMask( pSampleMask_ )
+ , alphaToCoverageEnable( alphaToCoverageEnable_ )
+ , alphaToOneEnable( alphaToOneEnable_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) - offsetof( PipelineMultisampleStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineMultisampleStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineMultisampleStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>(&rhs);
return *this;
}
@@ -37072,19 +34825,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineMultisampleStateCreateInfo & setFlags( vk::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineMultisampleStateCreateInfo & setRasterizationSamples( vk::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationSamples = rasterizationSamples_;
return *this;
}
- PipelineMultisampleStateCreateInfo & setSampleShadingEnable( vk::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
{
sampleShadingEnable = sampleShadingEnable_;
return *this;
@@ -37096,19 +34849,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineMultisampleStateCreateInfo & setPSampleMask( const vk::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT
{
pSampleMask = pSampleMask_;
return *this;
}
- PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( vk::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
{
alphaToCoverageEnable = alphaToCoverageEnable_;
return *this;
}
- PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( vk::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
{
alphaToOneEnable = alphaToOneEnable_;
return *this;
@@ -37129,7 +34882,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
- && vk::operator==( rasterizationSamples, rhs.rasterizationSamples )
+ && ( rasterizationSamples == rhs.rasterizationSamples )
&& ( sampleShadingEnable == rhs.sampleShadingEnable )
&& ( minSampleShading == rhs.minSampleShading )
&& ( pSampleMask == rhs.pSampleMask )
@@ -37142,21 +34895,29 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineMultisampleStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
+ float minSampleShading = {};
+ const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
};
static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct StencilOpState
{
- VULKAN_HPP_CONSTEXPR StencilOpState( vk::StencilOp failOp_ = vk::StencilOp::eKeep,
- vk::StencilOp passOp_ = vk::StencilOp::eKeep,
- vk::StencilOp depthFailOp_ = vk::StencilOp::eKeep,
- vk::CompareOp compareOp_ = vk::CompareOp::eNever,
- uint32_t compareMask_ = 0,
- uint32_t writeMask_ = 0,
- uint32_t reference_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+ VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+ VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+ uint32_t compareMask_ = {},
+ uint32_t writeMask_ = {},
+ uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT
: failOp( failOp_ )
, passOp( passOp_ )
, depthFailOp( depthFailOp_ )
@@ -37168,34 +34929,34 @@ namespace VULKAN_HPP_NAMESPACE
StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkStencilOpState*>(this) = rhs;
+ *this = rhs;
}
StencilOpState& operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkStencilOpState*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>(&rhs);
return *this;
}
- StencilOpState & setFailOp( vk::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
{
failOp = failOp_;
return *this;
}
- StencilOpState & setPassOp( vk::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
{
passOp = passOp_;
return *this;
}
- StencilOpState & setDepthFailOp( vk::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
{
depthFailOp = depthFailOp_;
return *this;
}
- StencilOpState & setCompareOp( vk::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+ StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
{
compareOp = compareOp_;
return *this;
@@ -37246,94 +35007,55 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::StencilOp failOp;
- vk::StencilOp passOp;
- vk::StencilOp depthFailOp;
- vk::CompareOp compareOp;
- uint32_t compareMask;
- uint32_t writeMask;
- uint32_t reference;
+ VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+ VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+ VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+ uint32_t compareMask = {};
+ uint32_t writeMask = {};
+ uint32_t reference = {};
};
static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineDepthStencilStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( vk::PipelineDepthStencilStateCreateFlags flags_ = vk::PipelineDepthStencilStateCreateFlags(),
- vk::Bool32 depthTestEnable_ = 0,
- vk::Bool32 depthWriteEnable_ = 0,
- vk::CompareOp depthCompareOp_ = vk::CompareOp::eNever,
- vk::Bool32 depthBoundsTestEnable_ = 0,
- vk::Bool32 stencilTestEnable_ = 0,
- vk::StencilOpState front_ = vk::StencilOpState(),
- vk::StencilOpState back_ = vk::StencilOpState(),
- float minDepthBounds_ = 0,
- float maxDepthBounds_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , depthTestEnable( depthTestEnable_ )
- , depthWriteEnable( depthWriteEnable_ )
- , depthCompareOp( depthCompareOp_ )
- , depthBoundsTestEnable( depthBoundsTestEnable_ )
- , stencilTestEnable( stencilTestEnable_ )
- , front( front_ )
- , back( back_ )
- , minDepthBounds( minDepthBounds_ )
- , maxDepthBounds( maxDepthBounds_ )
- {}
-
- PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>(this) = rhs;
- }
-
- PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineDepthStencilStateCreateFlags flags;
- vk::Bool32 depthTestEnable;
- vk::Bool32 depthWriteEnable;
- vk::CompareOp depthCompareOp;
- vk::Bool32 depthBoundsTestEnable;
- vk::Bool32 stencilTestEnable;
- vk::StencilOpState front;
- vk::StencilOpState back;
- float minDepthBounds;
- float maxDepthBounds;
- };
- static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineDepthStencilStateCreateInfo : public layout::PipelineDepthStencilStateCreateInfo
- {
- VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( vk::PipelineDepthStencilStateCreateFlags flags_ = vk::PipelineDepthStencilStateCreateFlags(),
- vk::Bool32 depthTestEnable_ = 0,
- vk::Bool32 depthWriteEnable_ = 0,
- vk::CompareOp depthCompareOp_ = vk::CompareOp::eNever,
- vk::Bool32 depthBoundsTestEnable_ = 0,
- vk::Bool32 stencilTestEnable_ = 0,
- vk::StencilOpState front_ = vk::StencilOpState(),
- vk::StencilOpState back_ = vk::StencilOpState(),
- float minDepthBounds_ = 0,
- float maxDepthBounds_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineDepthStencilStateCreateInfo( flags_, depthTestEnable_, depthWriteEnable_, depthCompareOp_, depthBoundsTestEnable_, stencilTestEnable_, front_, back_, minDepthBounds_, maxDepthBounds_ )
+ struct PipelineDepthStencilStateCreateInfo
+ {
+ VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {},
+ VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+ VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {},
+ VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
+ VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
+ float minDepthBounds_ = {},
+ float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , depthTestEnable( depthTestEnable_ )
+ , depthWriteEnable( depthWriteEnable_ )
+ , depthCompareOp( depthCompareOp_ )
+ , depthBoundsTestEnable( depthBoundsTestEnable_ )
+ , stencilTestEnable( stencilTestEnable_ )
+ , front( front_ )
+ , back( back_ )
+ , minDepthBounds( minDepthBounds_ )
+ , maxDepthBounds( maxDepthBounds_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) - offsetof( PipelineDepthStencilStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineDepthStencilStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineDepthStencilStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>(&rhs);
return *this;
}
@@ -37343,49 +35065,49 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineDepthStencilStateCreateInfo & setFlags( vk::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setDepthTestEnable( vk::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthTestEnable = depthTestEnable_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( vk::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthWriteEnable = depthWriteEnable_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setDepthCompareOp( vk::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
{
depthCompareOp = depthCompareOp_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( vk::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthBoundsTestEnable = depthBoundsTestEnable_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setStencilTestEnable( vk::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
stencilTestEnable = stencilTestEnable_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setFront( vk::StencilOpState front_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState front_ ) VULKAN_HPP_NOEXCEPT
{
front = front_;
return *this;
}
- PipelineDepthStencilStateCreateInfo & setBack( vk::StencilOpState back_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState back_ ) VULKAN_HPP_NOEXCEPT
{
back = back_;
return *this;
@@ -37434,22 +35156,33 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineDepthStencilStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
+ VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+ VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
+ VULKAN_HPP_NAMESPACE::StencilOpState front = {};
+ VULKAN_HPP_NAMESPACE::StencilOpState back = {};
+ float minDepthBounds = {};
+ float maxDepthBounds = {};
};
static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct PipelineColorBlendAttachmentState
{
- VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( vk::Bool32 blendEnable_ = 0,
- vk::BlendFactor srcColorBlendFactor_ = vk::BlendFactor::eZero,
- vk::BlendFactor dstColorBlendFactor_ = vk::BlendFactor::eZero,
- vk::BlendOp colorBlendOp_ = vk::BlendOp::eAdd,
- vk::BlendFactor srcAlphaBlendFactor_ = vk::BlendFactor::eZero,
- vk::BlendFactor dstAlphaBlendFactor_ = vk::BlendFactor::eZero,
- vk::BlendOp alphaBlendOp_ = vk::BlendOp::eAdd,
- vk::ColorComponentFlags colorWriteMask_ = vk::ColorComponentFlags() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {},
+ VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+ VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+ VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+ VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT
: blendEnable( blendEnable_ )
, srcColorBlendFactor( srcColorBlendFactor_ )
, dstColorBlendFactor( dstColorBlendFactor_ )
@@ -37462,58 +35195,58 @@ namespace VULKAN_HPP_NAMESPACE
PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPipelineColorBlendAttachmentState*>(this) = rhs;
+ *this = rhs;
}
PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPipelineColorBlendAttachmentState*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>(&rhs);
return *this;
}
- PipelineColorBlendAttachmentState & setBlendEnable( vk::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
{
blendEnable = blendEnable_;
return *this;
}
- PipelineColorBlendAttachmentState & setSrcColorBlendFactor( vk::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
srcColorBlendFactor = srcColorBlendFactor_;
return *this;
}
- PipelineColorBlendAttachmentState & setDstColorBlendFactor( vk::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
dstColorBlendFactor = dstColorBlendFactor_;
return *this;
}
- PipelineColorBlendAttachmentState & setColorBlendOp( vk::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
colorBlendOp = colorBlendOp_;
return *this;
}
- PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( vk::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
srcAlphaBlendFactor = srcAlphaBlendFactor_;
return *this;
}
- PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( vk::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
{
dstAlphaBlendFactor = dstAlphaBlendFactor_;
return *this;
}
- PipelineColorBlendAttachmentState & setAlphaBlendOp( vk::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
{
alphaBlendOp = alphaBlendOp_;
return *this;
}
- PipelineColorBlendAttachmentState & setColorWriteMask( vk::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
{
colorWriteMask = colorWriteMask_;
return *this;
@@ -37547,81 +35280,50 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Bool32 blendEnable;
- vk::BlendFactor srcColorBlendFactor;
- vk::BlendFactor dstColorBlendFactor;
- vk::BlendOp colorBlendOp;
- vk::BlendFactor srcAlphaBlendFactor;
- vk::BlendFactor dstAlphaBlendFactor;
- vk::BlendOp alphaBlendOp;
- vk::ColorComponentFlags colorWriteMask;
+ VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
+ VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+ VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+ VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
};
static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineColorBlendStateCreateInfo
{
- struct PipelineColorBlendStateCreateInfo
+ VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {},
+ VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
+ uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {},
+ std::array<float,4> const& blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , logicOpEnable( logicOpEnable_ )
+ , logicOp( logicOp_ )
+ , attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
+ , blendConstants{}
{
- protected:
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( vk::PipelineColorBlendStateCreateFlags flags_ = vk::PipelineColorBlendStateCreateFlags(),
- vk::Bool32 logicOpEnable_ = 0,
- vk::LogicOp logicOp_ = vk::LogicOp::eClear,
- uint32_t attachmentCount_ = 0,
- const vk::PipelineColorBlendAttachmentState* pAttachments_ = nullptr,
- std::array<float,4> const& blendConstants_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , logicOpEnable( logicOpEnable_ )
- , logicOp( logicOp_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , blendConstants{}
- {
- vk::ConstExpressionArrayCopy<float,4,4>::copy( blendConstants, blendConstants_ );
- }
-
- PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>(this) = rhs;
- }
-
- PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineColorBlendStateCreateFlags flags;
- vk::Bool32 logicOpEnable;
- vk::LogicOp logicOp;
- uint32_t attachmentCount;
- const vk::PipelineColorBlendAttachmentState* pAttachments;
- float blendConstants[4];
- };
- static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( blendConstants, blendConstants_ );
+ }
- struct PipelineColorBlendStateCreateInfo : public layout::PipelineColorBlendStateCreateInfo
- {
- VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( vk::PipelineColorBlendStateCreateFlags flags_ = vk::PipelineColorBlendStateCreateFlags(),
- vk::Bool32 logicOpEnable_ = 0,
- vk::LogicOp logicOp_ = vk::LogicOp::eClear,
- uint32_t attachmentCount_ = 0,
- const vk::PipelineColorBlendAttachmentState* pAttachments_ = nullptr,
- std::array<float,4> const& blendConstants_ = { { 0 } } ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineColorBlendStateCreateInfo( flags_, logicOpEnable_, logicOp_, attachmentCount_, pAttachments_, blendConstants_ )
- {}
+ VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) - offsetof( PipelineColorBlendStateCreateInfo, pNext ) );
+ return *this;
+ }
PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineColorBlendStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineColorBlendStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>(&rhs);
return *this;
}
@@ -37631,19 +35333,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineColorBlendStateCreateInfo & setFlags( vk::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineColorBlendStateCreateInfo & setLogicOpEnable( vk::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
{
logicOpEnable = logicOpEnable_;
return *this;
}
- PipelineColorBlendStateCreateInfo & setLogicOp( vk::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
{
logicOp = logicOp_;
return *this;
@@ -37655,7 +35357,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineColorBlendStateCreateInfo & setPAttachments( const vk::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
@@ -37694,61 +35396,43 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineColorBlendStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
+ VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {};
+ float blendConstants[4] = {};
};
static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
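
The rename from vk::ConstExpressionArrayCopy to VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy in the constructor above reflects the helper's job: copying a std::array into the C array member inside a VULKAN_HPP_CONSTEXPR_14 constructor, where memcpy is not usable. The real helper is defined earlier in this header; a plausible shape, assumed for illustration only:

    // Assumed sketch: a constexpr-friendly element-wise copy. The loop form
    // requires C++14 constexpr, matching the VULKAN_HPP_CONSTEXPR_14 macro.
    #include <array>
    #include <cstddef>

    template <typename T, std::size_t N>
    VULKAN_HPP_CONSTEXPR_14 void constexprArrayCopy( T ( &dst )[N], std::array<T, N> const & src ) noexcept
    {
      for ( std::size_t i = 0; i < N; ++i )
      {
        dst[i] = src[i];
      }
    }
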
- namespace layout
- {
- struct PipelineDynamicStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( vk::PipelineDynamicStateCreateFlags flags_ = vk::PipelineDynamicStateCreateFlags(),
- uint32_t dynamicStateCount_ = 0,
- const vk::DynamicState* pDynamicStates_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , dynamicStateCount( dynamicStateCount_ )
- , pDynamicStates( pDynamicStates_ )
- {}
-
- PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>(this) = rhs;
- }
-
- PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineDynamicStateCreateFlags flags;
- uint32_t dynamicStateCount;
- const vk::DynamicState* pDynamicStates;
- };
- static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineDynamicStateCreateInfo : public layout::PipelineDynamicStateCreateInfo
+ struct PipelineDynamicStateCreateInfo
{
- VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( vk::PipelineDynamicStateCreateFlags flags_ = vk::PipelineDynamicStateCreateFlags(),
- uint32_t dynamicStateCount_ = 0,
- const vk::DynamicState* pDynamicStates_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineDynamicStateCreateInfo( flags_, dynamicStateCount_, pDynamicStates_ )
+ VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
+ uint32_t dynamicStateCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , dynamicStateCount( dynamicStateCount_ )
+ , pDynamicStates( pDynamicStates_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) - offsetof( PipelineDynamicStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineDynamicStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineDynamicStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>(&rhs);
return *this;
}
@@ -37758,7 +35442,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineDynamicStateCreateInfo & setFlags( vk::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -37770,7 +35454,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineDynamicStateCreateInfo & setPDynamicStates( const vk::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicStates = pDynamicStates_;
return *this;
@@ -37800,117 +35484,68 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineDynamicStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
+ uint32_t dynamicStateCount = {};
+ const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {};
};
static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct GraphicsPipelineCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR GraphicsPipelineCreateInfo( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(),
- uint32_t stageCount_ = 0,
- const vk::PipelineShaderStageCreateInfo* pStages_ = nullptr,
- const vk::PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr,
- const vk::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr,
- const vk::PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr,
- const vk::PipelineViewportStateCreateInfo* pViewportState_ = nullptr,
- const vk::PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr,
- const vk::PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr,
- const vk::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr,
- const vk::PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr,
- const vk::PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr,
- vk::PipelineLayout layout_ = vk::PipelineLayout(),
- vk::RenderPass renderPass_ = vk::RenderPass(),
- uint32_t subpass_ = 0,
- vk::Pipeline basePipelineHandle_ = vk::Pipeline(),
- int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , stageCount( stageCount_ )
- , pStages( pStages_ )
- , pVertexInputState( pVertexInputState_ )
- , pInputAssemblyState( pInputAssemblyState_ )
- , pTessellationState( pTessellationState_ )
- , pViewportState( pViewportState_ )
- , pRasterizationState( pRasterizationState_ )
- , pMultisampleState( pMultisampleState_ )
- , pDepthStencilState( pDepthStencilState_ )
- , pColorBlendState( pColorBlendState_ )
- , pDynamicState( pDynamicState_ )
- , layout( layout_ )
- , renderPass( renderPass_ )
- , subpass( subpass_ )
- , basePipelineHandle( basePipelineHandle_ )
- , basePipelineIndex( basePipelineIndex_ )
- {}
-
- GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkGraphicsPipelineCreateInfo*>(this) = rhs;
- }
-
- GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkGraphicsPipelineCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineCreateFlags flags;
- uint32_t stageCount;
- const vk::PipelineShaderStageCreateInfo* pStages;
- const vk::PipelineVertexInputStateCreateInfo* pVertexInputState;
- const vk::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
- const vk::PipelineTessellationStateCreateInfo* pTessellationState;
- const vk::PipelineViewportStateCreateInfo* pViewportState;
- const vk::PipelineRasterizationStateCreateInfo* pRasterizationState;
- const vk::PipelineMultisampleStateCreateInfo* pMultisampleState;
- const vk::PipelineDepthStencilStateCreateInfo* pDepthStencilState;
- const vk::PipelineColorBlendStateCreateInfo* pColorBlendState;
- const vk::PipelineDynamicStateCreateInfo* pDynamicState;
- vk::PipelineLayout layout;
- vk::RenderPass renderPass;
- uint32_t subpass;
- vk::Pipeline basePipelineHandle;
- int32_t basePipelineIndex;
- };
- static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct GraphicsPipelineCreateInfo : public layout::GraphicsPipelineCreateInfo
- {
- VULKAN_HPP_CONSTEXPR GraphicsPipelineCreateInfo( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(),
- uint32_t stageCount_ = 0,
- const vk::PipelineShaderStageCreateInfo* pStages_ = nullptr,
- const vk::PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr,
- const vk::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr,
- const vk::PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr,
- const vk::PipelineViewportStateCreateInfo* pViewportState_ = nullptr,
- const vk::PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr,
- const vk::PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr,
- const vk::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr,
- const vk::PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr,
- const vk::PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr,
- vk::PipelineLayout layout_ = vk::PipelineLayout(),
- vk::RenderPass renderPass_ = vk::RenderPass(),
- uint32_t subpass_ = 0,
- vk::Pipeline basePipelineHandle_ = vk::Pipeline(),
- int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::GraphicsPipelineCreateInfo( flags_, stageCount_, pStages_, pVertexInputState_, pInputAssemblyState_, pTessellationState_, pViewportState_, pRasterizationState_, pMultisampleState_, pDepthStencilState_, pColorBlendState_, pDynamicState_, layout_, renderPass_, subpass_, basePipelineHandle_, basePipelineIndex_ )
+ struct GraphicsPipelineCreateInfo
+ {
+ VULKAN_HPP_CONSTEXPR GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+ uint32_t stageCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+ uint32_t subpass_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , stageCount( stageCount_ )
+ , pStages( pStages_ )
+ , pVertexInputState( pVertexInputState_ )
+ , pInputAssemblyState( pInputAssemblyState_ )
+ , pTessellationState( pTessellationState_ )
+ , pViewportState( pViewportState_ )
+ , pRasterizationState( pRasterizationState_ )
+ , pMultisampleState( pMultisampleState_ )
+ , pDepthStencilState( pDepthStencilState_ )
+ , pColorBlendState( pColorBlendState_ )
+ , pDynamicState( pDynamicState_ )
+ , layout( layout_ )
+ , renderPass( renderPass_ )
+ , subpass( subpass_ )
+ , basePipelineHandle( basePipelineHandle_ )
+ , basePipelineIndex( basePipelineIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & operator=( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) - offsetof( GraphicsPipelineCreateInfo, pNext ) );
+ return *this;
+ }
+
GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::GraphicsPipelineCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::GraphicsPipelineCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>(&rhs);
return *this;
}
@@ -37920,7 +35555,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GraphicsPipelineCreateInfo & setFlags( vk::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -37932,73 +35567,73 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GraphicsPipelineCreateInfo & setPStages( const vk::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
}
- GraphicsPipelineCreateInfo & setPVertexInputState( const vk::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
{
pVertexInputState = pVertexInputState_;
return *this;
}
- GraphicsPipelineCreateInfo & setPInputAssemblyState( const vk::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
{
pInputAssemblyState = pInputAssemblyState_;
return *this;
}
- GraphicsPipelineCreateInfo & setPTessellationState( const vk::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
{
pTessellationState = pTessellationState_;
return *this;
}
- GraphicsPipelineCreateInfo & setPViewportState( const vk::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT
{
pViewportState = pViewportState_;
return *this;
}
- GraphicsPipelineCreateInfo & setPRasterizationState( const vk::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
{
pRasterizationState = pRasterizationState_;
return *this;
}
- GraphicsPipelineCreateInfo & setPMultisampleState( const vk::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
{
pMultisampleState = pMultisampleState_;
return *this;
}
- GraphicsPipelineCreateInfo & setPDepthStencilState( const vk::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilState = pDepthStencilState_;
return *this;
}
- GraphicsPipelineCreateInfo & setPColorBlendState( const vk::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
{
pColorBlendState = pColorBlendState_;
return *this;
}
- GraphicsPipelineCreateInfo & setPDynamicState( const vk::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
pDynamicState = pDynamicState_;
return *this;
}
- GraphicsPipelineCreateInfo & setLayout( vk::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- GraphicsPipelineCreateInfo & setRenderPass( vk::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
@@ -38010,7 +35645,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- GraphicsPipelineCreateInfo & setBasePipelineHandle( vk::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
@@ -38060,28 +35695,46 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::GraphicsPipelineCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+ const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState = {};
+ const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ uint32_t subpass = {};
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+ int32_t basePipelineIndex = {};
};
static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
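Because every setter returns `*this`, the flattened wrapper doubles as a fluent builder, which is the main ergonomic win over filling the C struct field by field. A sketch, assuming the referenced state blocks (`shaderStages`, `vertexInput`, `inputAssembly`, and so on), `pipelineLayout`, and `renderPass` are set up elsewhere:

    vk::GraphicsPipelineCreateInfo pipelineInfo = vk::GraphicsPipelineCreateInfo()
        .setStageCount( 2 )
        .setPStages( shaderStages )                 // const vk::PipelineShaderStageCreateInfo*
        .setPVertexInputState( &vertexInput )
        .setPInputAssemblyState( &inputAssembly )
        .setPViewportState( &viewportState )
        .setPRasterizationState( &rasterization )
        .setPMultisampleState( &multisample )
        .setPColorBlendState( &colorBlend )
        .setPDynamicState( &dynamicState )
        .setLayout( pipelineLayout )
        .setRenderPass( renderPass )
        .setSubpass( 0 );                           // then handed to pipeline creation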
struct XYColorEXT
{
- VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = 0,
- float y_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {},
+ float y_ = {} ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
{}
XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkXYColorEXT*>(this) = rhs;
+ *this = rhs;
}
XYColorEXT& operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkXYColorEXT*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>(&rhs);
return *this;
}
@@ -38119,81 +35772,46 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- float x;
- float y;
+ float x = {};
+ float y = {};
};
static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct HdrMetadataEXT
{
- struct HdrMetadataEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR HdrMetadataEXT( vk::XYColorEXT displayPrimaryRed_ = vk::XYColorEXT(),
- vk::XYColorEXT displayPrimaryGreen_ = vk::XYColorEXT(),
- vk::XYColorEXT displayPrimaryBlue_ = vk::XYColorEXT(),
- vk::XYColorEXT whitePoint_ = vk::XYColorEXT(),
- float maxLuminance_ = 0,
- float minLuminance_ = 0,
- float maxContentLightLevel_ = 0,
- float maxFrameAverageLightLevel_ = 0 ) VULKAN_HPP_NOEXCEPT
- : displayPrimaryRed( displayPrimaryRed_ )
- , displayPrimaryGreen( displayPrimaryGreen_ )
- , displayPrimaryBlue( displayPrimaryBlue_ )
- , whitePoint( whitePoint_ )
- , maxLuminance( maxLuminance_ )
- , minLuminance( minLuminance_ )
- , maxContentLightLevel( maxContentLightLevel_ )
- , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
- {}
-
- HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkHdrMetadataEXT*>(this) = rhs;
- }
-
- HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkHdrMetadataEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eHdrMetadataEXT;
- const void* pNext = nullptr;
- vk::XYColorEXT displayPrimaryRed;
- vk::XYColorEXT displayPrimaryGreen;
- vk::XYColorEXT displayPrimaryBlue;
- vk::XYColorEXT whitePoint;
- float maxLuminance;
- float minLuminance;
- float maxContentLightLevel;
- float maxFrameAverageLightLevel;
- };
- static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct HdrMetadataEXT : public layout::HdrMetadataEXT
- {
- VULKAN_HPP_CONSTEXPR HdrMetadataEXT( vk::XYColorEXT displayPrimaryRed_ = vk::XYColorEXT(),
- vk::XYColorEXT displayPrimaryGreen_ = vk::XYColorEXT(),
- vk::XYColorEXT displayPrimaryBlue_ = vk::XYColorEXT(),
- vk::XYColorEXT whitePoint_ = vk::XYColorEXT(),
- float maxLuminance_ = 0,
- float minLuminance_ = 0,
- float maxContentLightLevel_ = 0,
- float maxFrameAverageLightLevel_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::HdrMetadataEXT( displayPrimaryRed_, displayPrimaryGreen_, displayPrimaryBlue_, whitePoint_, maxLuminance_, minLuminance_, maxContentLightLevel_, maxFrameAverageLightLevel_ )
+ VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
+ VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
+ VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
+ VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
+ float maxLuminance_ = {},
+ float minLuminance_ = {},
+ float maxContentLightLevel_ = {},
+ float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT
+ : displayPrimaryRed( displayPrimaryRed_ )
+ , displayPrimaryGreen( displayPrimaryGreen_ )
+ , displayPrimaryBlue( displayPrimaryBlue_ )
+ , whitePoint( whitePoint_ )
+ , maxLuminance( maxLuminance_ )
+ , minLuminance( minLuminance_ )
+ , maxContentLightLevel( maxContentLightLevel_ )
+ , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
{}
+ VULKAN_HPP_NAMESPACE::HdrMetadataEXT & operator=( VULKAN_HPP_NAMESPACE::HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) - offsetof( HdrMetadataEXT, pNext ) );
+ return *this;
+ }
+
HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::HdrMetadataEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::HdrMetadataEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>(&rhs);
return *this;
}
@@ -38203,25 +35821,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- HdrMetadataEXT & setDisplayPrimaryRed( vk::XYColorEXT displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryRed = displayPrimaryRed_;
return *this;
}
- HdrMetadataEXT & setDisplayPrimaryGreen( vk::XYColorEXT displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryGreen = displayPrimaryGreen_;
return *this;
}
- HdrMetadataEXT & setDisplayPrimaryBlue( vk::XYColorEXT displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
{
displayPrimaryBlue = displayPrimaryBlue_;
return *this;
}
- HdrMetadataEXT & setWhitePoint( vk::XYColorEXT whitePoint_ ) VULKAN_HPP_NOEXCEPT
+ HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ ) VULKAN_HPP_NOEXCEPT
{
whitePoint = whitePoint_;
return *this;
@@ -38280,53 +35898,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::HdrMetadataEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {};
+ VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {};
+ VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {};
+ VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {};
+ float maxLuminance = {};
+ float minLuminance = {};
+ float maxContentLightLevel = {};
+ float maxFrameAverageLightLevel = {};
};
static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
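HdrMetadataEXT carries the mastering-display metadata of VK_EXT_hdr_metadata. A sketch with illustrative HDR10-style values (BT.2020 primaries and a D65 white point; the numbers are examples, not requirements):

    vk::HdrMetadataEXT hdr( vk::XYColorEXT( 0.708f, 0.292f ),    // red primary
                            vk::XYColorEXT( 0.170f, 0.797f ),    // green primary
                            vk::XYColorEXT( 0.131f, 0.046f ),    // blue primary
                            vk::XYColorEXT( 0.3127f, 0.3290f ),  // D65 white point
                            1000.0f,                             // maxLuminance (cd/m^2)
                            0.001f,                              // minLuminance
                            1000.0f,                             // maxContentLightLevel
                            400.0f );                            // maxFrameAverageLightLevel
    // Applied per swapchain, e.g. through vk::Device::setHdrMetadataEXT.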
- namespace layout
+ struct HeadlessSurfaceCreateInfoEXT
{
- struct HeadlessSurfaceCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( vk::HeadlessSurfaceCreateFlagsEXT flags_ = vk::HeadlessSurfaceCreateFlagsEXT() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
-
- HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>(this) = rhs;
- }
-
- HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
- const void* pNext = nullptr;
- vk::HeadlessSurfaceCreateFlagsEXT flags;
- };
- static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct HeadlessSurfaceCreateInfoEXT : public layout::HeadlessSurfaceCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( vk::HeadlessSurfaceCreateFlagsEXT flags_ = vk::HeadlessSurfaceCreateFlagsEXT() ) VULKAN_HPP_NOEXCEPT
- : layout::HeadlessSurfaceCreateInfoEXT( flags_ )
+ VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
+ VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) - offsetof( HeadlessSurfaceCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::HeadlessSurfaceCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::HeadlessSurfaceCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -38336,7 +35942,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- HeadlessSurfaceCreateInfoEXT & setFlags( vk::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -38364,59 +35970,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::HeadlessSurfaceCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
};
static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
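HeadlessSurfaceCreateInfoEXT has no payload beyond its reserved flags; VK_EXT_headless_surface exists precisely so presentation code paths can be exercised without a window system (CI runs, off-screen rendering). A sketch, assuming a `vk::Instance instance` created with that extension enabled:

    vk::HeadlessSurfaceCreateInfoEXT headlessInfo{};   // flags are reserved for future use
    vk::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( headlessInfo );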
#ifdef VK_USE_PLATFORM_IOS_MVK
- namespace layout
- {
- struct IOSSurfaceCreateInfoMVK
- {
- protected:
- VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( vk::IOSSurfaceCreateFlagsMVK flags_ = vk::IOSSurfaceCreateFlagsMVK(),
- const void* pView_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pView( pView_ )
- {}
-
- IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>(this) = rhs;
- }
-
- IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
- const void* pNext = nullptr;
- vk::IOSSurfaceCreateFlagsMVK flags;
- const void* pView;
- };
- static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "layout struct and wrapper have different size!" );
- }
-
- struct IOSSurfaceCreateInfoMVK : public layout::IOSSurfaceCreateInfoMVK
+ struct IOSSurfaceCreateInfoMVK
{
- VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( vk::IOSSurfaceCreateFlagsMVK flags_ = vk::IOSSurfaceCreateFlagsMVK(),
- const void* pView_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::IOSSurfaceCreateInfoMVK( flags_, pView_ )
+ VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {},
+ const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , pView( pView_ )
{}
+ VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & operator=( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) - offsetof( IOSSurfaceCreateInfoMVK, pNext ) );
+ return *this;
+ }
+
IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::IOSSurfaceCreateInfoMVK( rhs )
- {}
+ {
+ *this = rhs;
+ }
IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::IOSSurfaceCreateInfoMVK::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>(&rhs);
return *this;
}
@@ -38426,7 +36011,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- IOSSurfaceCreateInfoMVK & setFlags( vk::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+ IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -38461,8 +36046,11 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::IOSSurfaceCreateInfoMVK::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
+ const void* pView = {};
};
static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
@@ -38470,51 +36058,51 @@ namespace VULKAN_HPP_NAMESPACE
struct ImageBlit
{
- VULKAN_HPP_CONSTEXPR_14 ImageBlit( vk::ImageSubresourceLayers srcSubresource_ = vk::ImageSubresourceLayers(),
- std::array<vk::Offset3D,2> const& srcOffsets_ = { { vk::Offset3D() } },
- vk::ImageSubresourceLayers dstSubresource_ = vk::ImageSubresourceLayers(),
- std::array<vk::Offset3D,2> const& dstOffsets_ = { { vk::Offset3D() } } ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
: srcSubresource( srcSubresource_ )
, srcOffsets{}
, dstSubresource( dstSubresource_ )
, dstOffsets{}
{
- vk::ConstExpressionArrayCopy<vk::Offset3D,2,2>::copy( srcOffsets, srcOffsets_ );
- vk::ConstExpressionArrayCopy<vk::Offset3D,2,2>::copy( dstOffsets, dstOffsets_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::Offset3D,2,2>::copy( srcOffsets, srcOffsets_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::Offset3D,2,2>::copy( dstOffsets, dstOffsets_ );
}
ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageBlit*>(this) = rhs;
+ *this = rhs;
}
ImageBlit& operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageBlit*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>(&rhs);
return *this;
}
- ImageBlit & setSrcSubresource( vk::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- ImageBlit & setSrcOffsets( std::array<vk::Offset3D,2> srcOffsets_ ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> srcOffsets_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( srcOffsets, srcOffsets_.data(), 2 * sizeof( vk::Offset3D ) );
+ memcpy( srcOffsets, srcOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) );
return *this;
}
- ImageBlit & setDstSubresource( vk::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- ImageBlit & setDstOffsets( std::array<vk::Offset3D,2> dstOffsets_ ) VULKAN_HPP_NOEXCEPT
+ ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> dstOffsets_ ) VULKAN_HPP_NOEXCEPT
{
- memcpy( dstOffsets, dstOffsets_.data(), 2 * sizeof( vk::Offset3D ) );
+ memcpy( dstOffsets, dstOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) );
return *this;
}
@@ -38531,9 +36119,9 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( srcSubresource == rhs.srcSubresource )
- && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( vk::Offset3D ) ) == 0 )
+ && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 )
&& ( dstSubresource == rhs.dstSubresource )
- && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( vk::Offset3D ) ) == 0 );
+ && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 );
}
bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -38542,21 +36130,21 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageSubresourceLayers srcSubresource;
- vk::Offset3D srcOffsets[2];
- vk::ImageSubresourceLayers dstSubresource;
- vk::Offset3D dstOffsets[2];
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffsets[2] = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffsets[2] = {};
};
static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
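ImageBlit is the one structure in this group whose members include C arrays (`srcOffsets`/`dstOffsets`, the two opposite corners of each region), which is why its setters go through `memcpy`, its `operator==` through `memcmp`, and its constructor through the renamed `ConstExpression1DArrayCopy` helper to stay `VULKAN_HPP_CONSTEXPR_14`. A sketch for a full-size color blit, with `srcW`, `srcH`, `dstW`, `dstH` assumed to hold the image dimensions:

    vk::ImageBlit region;
    region.setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
          .setSrcOffsets( { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( srcW, srcH, 1 ) } )
          .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
          .setDstOffsets( { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( dstW, dstH, 1 ) } );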
struct ImageCopy
{
- VULKAN_HPP_CONSTEXPR ImageCopy( vk::ImageSubresourceLayers srcSubresource_ = vk::ImageSubresourceLayers(),
- vk::Offset3D srcOffset_ = vk::Offset3D(),
- vk::ImageSubresourceLayers dstSubresource_ = vk::ImageSubresourceLayers(),
- vk::Offset3D dstOffset_ = vk::Offset3D(),
- vk::Extent3D extent_ = vk::Extent3D() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
: srcSubresource( srcSubresource_ )
, srcOffset( srcOffset_ )
, dstSubresource( dstSubresource_ )
@@ -38566,40 +36154,40 @@ namespace VULKAN_HPP_NAMESPACE
ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageCopy*>(this) = rhs;
+ *this = rhs;
}
ImageCopy& operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageCopy*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>(&rhs);
return *this;
}
- ImageCopy & setSrcSubresource( vk::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- ImageCopy & setSrcOffset( vk::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- ImageCopy & setDstSubresource( vk::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- ImageCopy & setDstOffset( vk::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
- ImageCopy & setExtent( vk::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+ ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -38630,104 +36218,59 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageSubresourceLayers srcSubresource;
- vk::Offset3D srcOffset;
- vk::ImageSubresourceLayers dstSubresource;
- vk::Offset3D dstOffset;
- vk::Extent3D extent;
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
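ImageCopy, by contrast, holds single offsets plus an extent, so plain member comparison suffices and the constructor can remain fully `VULKAN_HPP_CONSTEXPR`. Typical use, assuming `cmd` is a recording `vk::CommandBuffer` and both images are already in the transfer layouts:

    vk::ImageCopy copyRegion( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                              vk::Offset3D( 0, 0, 0 ),
                              vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                              vk::Offset3D( 0, 0, 0 ),
                              vk::Extent3D( width, height, 1 ) );
    cmd.copyImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
                   dstImage, vk::ImageLayout::eTransferDstOptimal,
                   copyRegion );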
- namespace layout
- {
- struct ImageCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageCreateInfo( vk::ImageCreateFlags flags_ = vk::ImageCreateFlags(),
- vk::ImageType imageType_ = vk::ImageType::e1D,
- vk::Format format_ = vk::Format::eUndefined,
- vk::Extent3D extent_ = vk::Extent3D(),
- uint32_t mipLevels_ = 0,
- uint32_t arrayLayers_ = 0,
- vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1,
- vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal,
- vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(),
- vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = 0,
- const uint32_t* pQueueFamilyIndices_ = nullptr,
- vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , imageType( imageType_ )
- , format( format_ )
- , extent( extent_ )
- , mipLevels( mipLevels_ )
- , arrayLayers( arrayLayers_ )
- , samples( samples_ )
- , tiling( tiling_ )
- , usage( usage_ )
- , sharingMode( sharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
- , initialLayout( initialLayout_ )
- {}
-
- ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageCreateInfo*>(this) = rhs;
- }
-
- ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageCreateInfo;
- const void* pNext = nullptr;
- vk::ImageCreateFlags flags;
- vk::ImageType imageType;
- vk::Format format;
- vk::Extent3D extent;
- uint32_t mipLevels;
- uint32_t arrayLayers;
- vk::SampleCountFlagBits samples;
- vk::ImageTiling tiling;
- vk::ImageUsageFlags usage;
- vk::SharingMode sharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- vk::ImageLayout initialLayout;
- };
- static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageCreateInfo : public layout::ImageCreateInfo
- {
- VULKAN_HPP_CONSTEXPR ImageCreateInfo( vk::ImageCreateFlags flags_ = vk::ImageCreateFlags(),
- vk::ImageType imageType_ = vk::ImageType::e1D,
- vk::Format format_ = vk::Format::eUndefined,
- vk::Extent3D extent_ = vk::Extent3D(),
- uint32_t mipLevels_ = 0,
- uint32_t arrayLayers_ = 0,
- vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1,
- vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal,
- vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(),
- vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = 0,
- const uint32_t* pQueueFamilyIndices_ = nullptr,
- vk::ImageLayout initialLayout_ = vk::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
- : layout::ImageCreateInfo( flags_, imageType_, format_, extent_, mipLevels_, arrayLayers_, samples_, tiling_, usage_, sharingMode_, queueFamilyIndexCount_, pQueueFamilyIndices_, initialLayout_ )
+ struct ImageCreateInfo
+ {
+ VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+ uint32_t mipLevels_ = {},
+ uint32_t arrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+ uint32_t queueFamilyIndexCount_ = {},
+ const uint32_t* pQueueFamilyIndices_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , imageType( imageType_ )
+ , format( format_ )
+ , extent( extent_ )
+ , mipLevels( mipLevels_ )
+ , arrayLayers( arrayLayers_ )
+ , samples( samples_ )
+ , tiling( tiling_ )
+ , usage( usage_ )
+ , sharingMode( sharingMode_ )
+ , queueFamilyIndexCount( queueFamilyIndexCount_ )
+ , pQueueFamilyIndices( pQueueFamilyIndices_ )
+ , initialLayout( initialLayout_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) - offsetof( ImageCreateInfo, pNext ) );
+ return *this;
+ }
+
ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>(&rhs);
return *this;
}
@@ -38737,25 +36280,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageCreateInfo & setFlags( vk::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ImageCreateInfo & setImageType( vk::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
{
imageType = imageType_;
return *this;
}
- ImageCreateInfo & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- ImageCreateInfo & setExtent( vk::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -38773,25 +36316,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageCreateInfo & setSamples( vk::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
- ImageCreateInfo & setTiling( vk::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
}
- ImageCreateInfo & setUsage( vk::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- ImageCreateInfo & setSharingMode( vk::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
@@ -38809,7 +36352,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageCreateInfo & setInitialLayout( vk::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+ ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
{
initialLayout = initialLayout_;
return *this;
@@ -38835,7 +36378,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( extent == rhs.extent )
&& ( mipLevels == rhs.mipLevels )
&& ( arrayLayers == rhs.arrayLayers )
- && vk::operator==( samples, rhs.samples )
+ && ( samples == rhs.samples )
&& ( tiling == rhs.tiling )
&& ( usage == rhs.usage )
&& ( sharingMode == rhs.sharingMode )
@@ -38849,25 +36392,48 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+ uint32_t mipLevels = {};
+ uint32_t arrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+ uint32_t queueFamilyIndexCount = {};
+ const uint32_t* pQueueFamilyIndices = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
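Note how ImageCreateInfo keeps meaningful defaults for its enum-typed members (`e1D`, `eUndefined`, `e1`, `eOptimal`, `eExclusive`) while everything else moves to value-initialization with `{}`. A sketch creating a sampled 2D texture, assuming a `vk::Device device` and `width`/`height` variables:

    vk::ImageCreateInfo imageInfo = vk::ImageCreateInfo()
        .setImageType( vk::ImageType::e2D )
        .setFormat( vk::Format::eR8G8B8A8Unorm )
        .setExtent( vk::Extent3D( width, height, 1 ) )
        .setMipLevels( 1 )
        .setArrayLayers( 1 )
        .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst );
    vk::Image image = device.createImage( imageInfo );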
struct SubresourceLayout
{
- SubresourceLayout() VULKAN_HPP_NOEXCEPT
+ SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT
+ : offset( offset_ )
+ , size( size_ )
+ , rowPitch( rowPitch_ )
+ , arrayPitch( arrayPitch_ )
+ , depthPitch( depthPitch_ )
{}
SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSubresourceLayout*>(this) = rhs;
+ *this = rhs;
}
SubresourceLayout& operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSubresourceLayout*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>(&rhs);
return *this;
}
@@ -38896,64 +36462,39 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DeviceSize offset;
- vk::DeviceSize size;
- vk::DeviceSize rowPitch;
- vk::DeviceSize arrayPitch;
- vk::DeviceSize depthPitch;
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
};
static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
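SubresourceLayout is an output structure, but it gains a full value constructor here (the old wrapper only had a default one), keeping all wrappers uniform. It is typically filled by querying a linearly tiled image so mapped memory can be addressed row by row; a sketch, assuming `device`, `image`, and a mapped pointer `mapped`:

    vk::ImageSubresource subresource( vk::ImageAspectFlagBits::eColor, 0, 0 );
    vk::SubresourceLayout layoutInfo = device.getImageSubresourceLayout( image, subresource );
    // Start at layoutInfo.offset and advance by layoutInfo.rowPitch bytes per row.
    const char * row = static_cast<const char *>( mapped ) + layoutInfo.offset;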
- namespace layout
- {
- struct ImageDrmFormatModifierExplicitCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = 0,
- uint32_t drmFormatModifierPlaneCount_ = 0,
- const vk::SubresourceLayout* pPlaneLayouts_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
- , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
- , pPlaneLayouts( pPlaneLayouts_ )
- {}
-
- ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>(this) = rhs;
- }
-
- ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
- const void* pNext = nullptr;
- uint64_t drmFormatModifier;
- uint32_t drmFormatModifierPlaneCount;
- const vk::SubresourceLayout* pPlaneLayouts;
- };
- static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageDrmFormatModifierExplicitCreateInfoEXT : public layout::ImageDrmFormatModifierExplicitCreateInfoEXT
+ struct ImageDrmFormatModifierExplicitCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = 0,
- uint32_t drmFormatModifierPlaneCount_ = 0,
- const vk::SubresourceLayout* pPlaneLayouts_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ImageDrmFormatModifierExplicitCreateInfoEXT( drmFormatModifier_, drmFormatModifierPlaneCount_, pPlaneLayouts_ )
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {},
+ uint32_t drmFormatModifierPlaneCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifier( drmFormatModifier_ )
+ , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
+ , pPlaneLayouts( pPlaneLayouts_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) - offsetof( ImageDrmFormatModifierExplicitCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageDrmFormatModifierExplicitCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageDrmFormatModifierExplicitCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -38975,7 +36516,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const vk::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+ ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pPlaneLayouts = pPlaneLayouts_;
return *this;
@@ -39005,57 +36546,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageDrmFormatModifierExplicitCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+ const void* pNext = {};
+ uint64_t drmFormatModifier = {};
+ uint32_t drmFormatModifierPlaneCount = {};
+ const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {};
};
static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
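The DRM-format-modifier create-info structures are extension inputs: they are not passed to an entry point directly but chained into `ImageCreateInfo::pNext` when `tiling` is `eDrmFormatModifierEXT`. A sketch for the explicit, single-plane case, with `modifier` and `rowPitch` assumed to come from the external allocator:

    vk::SubresourceLayout planeLayout( 0 /* offset */, 0, rowPitch, 0, 0 );  // offset and rowPitch describe the single plane
    vk::ImageDrmFormatModifierExplicitCreateInfoEXT drmInfo( modifier, 1, &planeLayout );
    vk::ImageCreateInfo imageInfo;
    imageInfo.setTiling( vk::ImageTiling::eDrmFormatModifierEXT )
             .setPNext( &drmInfo );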
- namespace layout
+ struct ImageDrmFormatModifierListCreateInfoEXT
{
- struct ImageDrmFormatModifierListCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = 0,
- const uint64_t* pDrmFormatModifiers_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifierCount( drmFormatModifierCount_ )
- , pDrmFormatModifiers( pDrmFormatModifiers_ )
- {}
-
- ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>(this) = rhs;
- }
-
- ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
- const void* pNext = nullptr;
- uint32_t drmFormatModifierCount;
- const uint64_t* pDrmFormatModifiers;
- };
- static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageDrmFormatModifierListCreateInfoEXT : public layout::ImageDrmFormatModifierListCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = 0,
- const uint64_t* pDrmFormatModifiers_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ImageDrmFormatModifierListCreateInfoEXT( drmFormatModifierCount_, pDrmFormatModifiers_ )
+ VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {},
+ const uint64_t* pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifierCount( drmFormatModifierCount_ )
+ , pDrmFormatModifiers( pDrmFormatModifiers_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) - offsetof( ImageDrmFormatModifierListCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageDrmFormatModifierListCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageDrmFormatModifierListCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -39100,52 +36622,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageDrmFormatModifierListCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
+ const void* pNext = {};
+ uint32_t drmFormatModifierCount = {};
+ const uint64_t* pDrmFormatModifiers = {};
};
static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct ImageDrmFormatModifierPropertiesEXT
{
- struct ImageDrmFormatModifierPropertiesEXT
- {
- protected:
- ImageDrmFormatModifierPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>(this) = rhs;
- }
-
- ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
- void* pNext = nullptr;
- uint64_t drmFormatModifier;
- };
- static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageDrmFormatModifierPropertiesEXT : public layout::ImageDrmFormatModifierPropertiesEXT
- {
- ImageDrmFormatModifierPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::ImageDrmFormatModifierPropertiesEXT()
+ ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifier( drmFormatModifier_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) - offsetof( ImageDrmFormatModifierPropertiesEXT, pNext ) );
+ return *this;
+ }
+
ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageDrmFormatModifierPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageDrmFormatModifierPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>(&rhs);
return *this;
}
@@ -39171,89 +36676,68 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageDrmFormatModifierPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+ void* pNext = {};
+ uint64_t drmFormatModifier = {};
};
static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ImageFormatListCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0,
- const vk::Format* pViewFormats_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : viewFormatCount( viewFormatCount_ )
- , pViewFormats( pViewFormats_ )
- {}
-
- ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageFormatListCreateInfoKHR*>(this) = rhs;
- }
-
- ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageFormatListCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageFormatListCreateInfoKHR;
- const void* pNext = nullptr;
- uint32_t viewFormatCount;
- const vk::Format* pViewFormats;
- };
- static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageFormatListCreateInfoKHR : public layout::ImageFormatListCreateInfoKHR
+ struct ImageFormatListCreateInfo
{
- VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = 0,
- const vk::Format* pViewFormats_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ImageFormatListCreateInfoKHR( viewFormatCount_, pViewFormats_ )
+ VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
+ : viewFormatCount( viewFormatCount_ )
+ , pViewFormats( pViewFormats_ )
{}
- ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageFormatListCreateInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) - offsetof( ImageFormatListCreateInfo, pNext ) );
+ return *this;
+ }
- ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageFormatListCreateInfoKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ ImageFormatListCreateInfo& operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>(&rhs);
return *this;
}
- ImageFormatListCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImageFormatListCreateInfoKHR & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = viewFormatCount_;
return *this;
}
- ImageFormatListCreateInfoKHR & setPViewFormats( const vk::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+ ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
{
pViewFormats = pViewFormats_;
return *this;
}
- operator VkImageFormatListCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageFormatListCreateInfoKHR*>( this );
+ return *reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
}
- operator VkImageFormatListCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageFormatListCreateInfoKHR*>( this );
+ return *reinterpret_cast<VkImageFormatListCreateInfo*>( this );
}
- bool operator==( ImageFormatListCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -39261,57 +36745,40 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pViewFormats == rhs.pViewFormats );
}
- bool operator!=( ImageFormatListCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::ImageFormatListCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
+ const void* pNext = {};
+ uint32_t viewFormatCount = {};
+ const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
};
- static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageFormatListCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct ImageFormatProperties2
- {
- protected:
- ImageFormatProperties2() VULKAN_HPP_NOEXCEPT
- {}
-
- ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageFormatProperties2*>(this) = rhs;
- }
-
- ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageFormatProperties2*>(this) = rhs;
- return *this;
- }
+ static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
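  // Editorial note on this hunk (not part of the generated header): the diff
  // replaces the old two-level pattern -- a protected layout::X base holding
  // the members plus a public X wrapper deriving from it -- with a single flat
  // struct. At the same time the KHR suffix is dropped, matching the promotion
  // of VK_KHR_image_format_list to core Vulkan 1.2; the old name is presumably
  // kept as an alias elsewhere in the header, along the lines of:
  //
  //   using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
  //
  // Default member initializers also change from explicit zeros and value
  // constructions ( 0, nullptr, vk::Format() ) to a uniform `{}`, which
  // value-initializes each member to the same result.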
- public:
- vk::StructureType sType = StructureType::eImageFormatProperties2;
- void* pNext = nullptr;
- vk::ImageFormatProperties imageFormatProperties;
- };
- static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageFormatProperties2 : public layout::ImageFormatProperties2
+ struct ImageFormatProperties2
{
- ImageFormatProperties2() VULKAN_HPP_NOEXCEPT
- : layout::ImageFormatProperties2()
+ ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : imageFormatProperties( imageFormatProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) - offsetof( ImageFormatProperties2, pNext ) );
+ return *this;
+ }
+
ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageFormatProperties2( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageFormatProperties2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>(&rhs);
return *this;
}
@@ -39337,19 +36804,21 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageFormatProperties2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
};
static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
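  // Editorial note (not part of this commit): the generated copy assignment
  // deliberately starts copying at pNext rather than at sType. sType is now a
  // const member with a fixed initializer, so it must never be overwritten;
  // one memcpy from &pNext to the end of the struct covers every mutable
  // member. offsetof() is only well-defined because the struct is standard
  // layout -- exactly what the static_assert above enforces. A minimal sketch
  // of the idiom on a hypothetical struct with the same head layout:
  //
  //   struct Sketch
  //   {
  //     Sketch & operator=( Sketch const & rhs ) noexcept
  //     {
  //       // skip the immutable sType header field, copy the rest verbatim
  //       memcpy( &pNext, &rhs.pNext, sizeof( Sketch ) - offsetof( Sketch, pNext ) );
  //       return *this;
  //     }
  //     const uint32_t sType = 1u;   // stands in for vk::StructureType
  //     const void *   pNext = {};
  //     uint32_t       value = {};
  //   };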
struct ImageSubresourceRange
{
- VULKAN_HPP_CONSTEXPR ImageSubresourceRange( vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags(),
- uint32_t baseMipLevel_ = 0,
- uint32_t levelCount_ = 0,
- uint32_t baseArrayLayer_ = 0,
- uint32_t layerCount_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+ uint32_t baseMipLevel_ = {},
+ uint32_t levelCount_ = {},
+ uint32_t baseArrayLayer_ = {},
+ uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ )
, baseMipLevel( baseMipLevel_ )
, levelCount( levelCount_ )
@@ -39359,16 +36828,16 @@ namespace VULKAN_HPP_NAMESPACE
ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageSubresourceRange*>(this) = rhs;
+ *this = rhs;
}
ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageSubresourceRange*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>(&rhs);
return *this;
}
- ImageSubresourceRange & setAspectMask( vk::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -39423,84 +36892,49 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageAspectFlags aspectMask;
- uint32_t baseMipLevel;
- uint32_t levelCount;
- uint32_t baseArrayLayer;
- uint32_t layerCount;
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ uint32_t baseMipLevel = {};
+ uint32_t levelCount = {};
+ uint32_t baseArrayLayer = {};
+ uint32_t layerCount = {};
};
static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
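  // Editorial note: a minimal usage sketch, not part of the generated header.
  // Each set*() returns *this, so the flat structs can be built fluently
  // (VULKAN_HPP_NAMESPACE expands to vk by default; the 0/1 values are
  // illustrative):
  //
  //   vk::ImageSubresourceRange range = vk::ImageSubresourceRange()
  //                                         .setAspectMask( vk::ImageAspectFlagBits::eColor )
  //                                         .setBaseMipLevel( 0 )
  //                                         .setLevelCount( 1 )
  //                                         .setBaseArrayLayer( 0 )
  //                                         .setLayerCount( 1 );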
- namespace layout
- {
- struct ImageMemoryBarrier
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags(),
- vk::ImageLayout oldLayout_ = vk::ImageLayout::eUndefined,
- vk::ImageLayout newLayout_ = vk::ImageLayout::eUndefined,
- uint32_t srcQueueFamilyIndex_ = 0,
- uint32_t dstQueueFamilyIndex_ = 0,
- vk::Image image_ = vk::Image(),
- vk::ImageSubresourceRange subresourceRange_ = vk::ImageSubresourceRange() ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- , oldLayout( oldLayout_ )
- , newLayout( newLayout_ )
- , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
- , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
- , image( image_ )
- , subresourceRange( subresourceRange_ )
- {}
-
- ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageMemoryBarrier*>(this) = rhs;
- }
-
- ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageMemoryBarrier*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageMemoryBarrier;
- const void* pNext = nullptr;
- vk::AccessFlags srcAccessMask;
- vk::AccessFlags dstAccessMask;
- vk::ImageLayout oldLayout;
- vk::ImageLayout newLayout;
- uint32_t srcQueueFamilyIndex;
- uint32_t dstQueueFamilyIndex;
- vk::Image image;
- vk::ImageSubresourceRange subresourceRange;
- };
- static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageMemoryBarrier : public layout::ImageMemoryBarrier
- {
- VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags(),
- vk::ImageLayout oldLayout_ = vk::ImageLayout::eUndefined,
- vk::ImageLayout newLayout_ = vk::ImageLayout::eUndefined,
- uint32_t srcQueueFamilyIndex_ = 0,
- uint32_t dstQueueFamilyIndex_ = 0,
- vk::Image image_ = vk::Image(),
- vk::ImageSubresourceRange subresourceRange_ = vk::ImageSubresourceRange() ) VULKAN_HPP_NOEXCEPT
- : layout::ImageMemoryBarrier( srcAccessMask_, dstAccessMask_, oldLayout_, newLayout_, srcQueueFamilyIndex_, dstQueueFamilyIndex_, image_, subresourceRange_ )
+ struct ImageMemoryBarrier
+ {
+ VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+ uint32_t srcQueueFamilyIndex_ = {},
+ uint32_t dstQueueFamilyIndex_ = {},
+ VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
+ : srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
+ , oldLayout( oldLayout_ )
+ , newLayout( newLayout_ )
+ , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+ , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+ , image( image_ )
+ , subresourceRange( subresourceRange_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageMemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) - offsetof( ImageMemoryBarrier, pNext ) );
+ return *this;
+ }
+
ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageMemoryBarrier( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageMemoryBarrier::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>(&rhs);
return *this;
}
@@ -39510,25 +36944,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageMemoryBarrier & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- ImageMemoryBarrier & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- ImageMemoryBarrier & setOldLayout( vk::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
{
oldLayout = oldLayout_;
return *this;
}
- ImageMemoryBarrier & setNewLayout( vk::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
{
newLayout = newLayout_;
return *this;
@@ -39546,13 +36980,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageMemoryBarrier & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
- ImageMemoryBarrier & setSubresourceRange( vk::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
@@ -39587,53 +37021,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageMemoryBarrier::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+ uint32_t srcQueueFamilyIndex = {};
+ uint32_t dstQueueFamilyIndex = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
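  // Editorial note: a hedged usage sketch (not part of the generated header)
  // showing the classic layout-transition barrier; `image` and `range` are
  // placeholders assumed to exist in the caller:
  //
  //   vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
  //       .setSrcAccessMask( {} )
  //       .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
  //       .setOldLayout( vk::ImageLayout::eUndefined )
  //       .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
  //       .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //       .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //       .setImage( image )
  //       .setSubresourceRange( range );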
- namespace layout
+ struct ImageMemoryRequirementsInfo2
{
- struct ImageMemoryRequirementsInfo2
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( vk::Image image_ = vk::Image() ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- {}
-
- ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageMemoryRequirementsInfo2*>(this) = rhs;
- }
-
- ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageMemoryRequirementsInfo2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
- const void* pNext = nullptr;
- vk::Image image;
- };
- static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageMemoryRequirementsInfo2 : public layout::ImageMemoryRequirementsInfo2
- {
- VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( vk::Image image_ = vk::Image() ) VULKAN_HPP_NOEXCEPT
- : layout::ImageMemoryRequirementsInfo2( image_ )
+ VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
+ : image( image_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) - offsetof( ImageMemoryRequirementsInfo2, pNext ) );
+ return *this;
+ }
+
ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageMemoryRequirementsInfo2( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageMemoryRequirementsInfo2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>(&rhs);
return *this;
}
@@ -39643,7 +37065,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageMemoryRequirementsInfo2 & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
@@ -39671,59 +37093,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageMemoryRequirementsInfo2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
};
static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_FUCHSIA
- namespace layout
+ struct ImagePipeSurfaceCreateInfoFUCHSIA
{
- struct ImagePipeSurfaceCreateInfoFUCHSIA
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( vk::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = vk::ImagePipeSurfaceCreateFlagsFUCHSIA(),
- zx_handle_t imagePipeHandle_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , imagePipeHandle( imagePipeHandle_ )
- {}
-
- ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>(this) = rhs;
- }
-
- ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
- const void* pNext = nullptr;
- vk::ImagePipeSurfaceCreateFlagsFUCHSIA flags;
- zx_handle_t imagePipeHandle;
- };
- static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "layout struct and wrapper have different size!" );
- }
-
- struct ImagePipeSurfaceCreateInfoFUCHSIA : public layout::ImagePipeSurfaceCreateInfoFUCHSIA
- {
- VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( vk::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = vk::ImagePipeSurfaceCreateFlagsFUCHSIA(),
- zx_handle_t imagePipeHandle_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ImagePipeSurfaceCreateInfoFUCHSIA( flags_, imagePipeHandle_ )
+ VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {},
+ zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , imagePipeHandle( imagePipeHandle_ )
{}
+ VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) - offsetof( ImagePipeSurfaceCreateInfoFUCHSIA, pNext ) );
+ return *this;
+ }
+
ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImagePipeSurfaceCreateInfoFUCHSIA( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImagePipeSurfaceCreateInfoFUCHSIA::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>(&rhs);
return *this;
}
@@ -39733,7 +37134,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( vk::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+ ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -39768,54 +37169,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImagePipeSurfaceCreateInfoFUCHSIA::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
+ zx_handle_t imagePipeHandle = {};
};
static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
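  // Editorial note: zx_handle_t is a Zircon kernel handle, so
  // ImagePipeSurfaceCreateInfoFUCHSIA (and its C counterpart) only compiles
  // when VK_USE_PLATFORM_FUCHSIA is defined -- hence the #ifdef/#endif guard
  // closing just above. The refactor inside the guard is identical to the
  // unguarded structs around it.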
- namespace layout
+ struct ImagePlaneMemoryRequirementsInfo
{
- struct ImagePlaneMemoryRequirementsInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( vk::ImageAspectFlagBits planeAspect_ = vk::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : planeAspect( planeAspect_ )
- {}
-
- ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>(this) = rhs;
- }
-
- ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
- const void* pNext = nullptr;
- vk::ImageAspectFlagBits planeAspect;
- };
- static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ImagePlaneMemoryRequirementsInfo : public layout::ImagePlaneMemoryRequirementsInfo
- {
- VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( vk::ImageAspectFlagBits planeAspect_ = vk::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
- : layout::ImagePlaneMemoryRequirementsInfo( planeAspect_ )
+ VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
+ : planeAspect( planeAspect_ )
{}
+ VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo & operator=( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) - offsetof( ImagePlaneMemoryRequirementsInfo, pNext ) );
+ return *this;
+ }
+
ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImagePlaneMemoryRequirementsInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImagePlaneMemoryRequirementsInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>(&rhs);
return *this;
}
@@ -39825,7 +37208,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImagePlaneMemoryRequirementsInfo & setPlaneAspect( vk::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+ ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
@@ -39845,7 +37228,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( planeAspect, rhs.planeAspect );
+ && ( planeAspect == rhs.planeAspect );
}
bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -39853,19 +37236,21 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImagePlaneMemoryRequirementsInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
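  // Editorial note: the operator== hunk above also changes
  // `vk::operator==( planeAspect, rhs.planeAspect )` to a plain
  // `planeAspect == rhs.planeAspect`. The member is a scoped enum value
  // (ImageAspectFlagBits), so the built-in equality is sufficient; the
  // explicitly qualified helper call in the old generated code appears to
  // have been a workaround that is no longer needed. The same simplification
  // recurs for handleType in ImportFenceFdInfoKHR further down.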
struct ImageResolve
{
- VULKAN_HPP_CONSTEXPR ImageResolve( vk::ImageSubresourceLayers srcSubresource_ = vk::ImageSubresourceLayers(),
- vk::Offset3D srcOffset_ = vk::Offset3D(),
- vk::ImageSubresourceLayers dstSubresource_ = vk::ImageSubresourceLayers(),
- vk::Offset3D dstOffset_ = vk::Offset3D(),
- vk::Extent3D extent_ = vk::Extent3D() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
: srcSubresource( srcSubresource_ )
, srcOffset( srcOffset_ )
, dstSubresource( dstSubresource_ )
@@ -39875,40 +37260,40 @@ namespace VULKAN_HPP_NAMESPACE
ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageResolve*>(this) = rhs;
+ *this = rhs;
}
ImageResolve& operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkImageResolve*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>(&rhs);
return *this;
}
- ImageResolve & setSrcSubresource( vk::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
{
srcSubresource = srcSubresource_;
return *this;
}
- ImageResolve & setSrcOffset( vk::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
{
srcOffset = srcOffset_;
return *this;
}
- ImageResolve & setDstSubresource( vk::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
{
dstSubresource = dstSubresource_;
return *this;
}
- ImageResolve & setDstOffset( vk::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
{
dstOffset = dstOffset_;
return *this;
}
- ImageResolve & setExtent( vk::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+ ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -39939,56 +37324,35 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageSubresourceLayers srcSubresource;
- vk::Offset3D srcOffset;
- vk::ImageSubresourceLayers dstSubresource;
- vk::Offset3D dstOffset;
- vk::Extent3D extent;
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+ VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+ VULKAN_HPP_NAMESPACE::Extent3D extent = {};
};
static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ImageSparseMemoryRequirementsInfo2
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( vk::Image image_ = vk::Image() ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- {}
-
- ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>(this) = rhs;
- }
-
- ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
- const void* pNext = nullptr;
- vk::Image image;
- };
- static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageSparseMemoryRequirementsInfo2 : public layout::ImageSparseMemoryRequirementsInfo2
+ struct ImageSparseMemoryRequirementsInfo2
{
- VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( vk::Image image_ = vk::Image() ) VULKAN_HPP_NOEXCEPT
- : layout::ImageSparseMemoryRequirementsInfo2( image_ )
+ VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
+ : image( image_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & operator=( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) - offsetof( ImageSparseMemoryRequirementsInfo2, pNext ) );
+ return *this;
+ }
+
ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageSparseMemoryRequirementsInfo2( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageSparseMemoryRequirementsInfo2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>(&rhs);
return *this;
}
@@ -39998,7 +37362,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageSparseMemoryRequirementsInfo2 & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
@@ -40026,137 +37390,99 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageSparseMemoryRequirementsInfo2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
};
static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ImageStencilUsageCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfoEXT( vk::ImageUsageFlags stencilUsage_ = vk::ImageUsageFlags() ) VULKAN_HPP_NOEXCEPT
- : stencilUsage( stencilUsage_ )
- {}
-
- ImageStencilUsageCreateInfoEXT( VkImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageStencilUsageCreateInfoEXT*>(this) = rhs;
- }
-
- ImageStencilUsageCreateInfoEXT& operator=( VkImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageStencilUsageCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageStencilUsageCreateInfoEXT;
- const void* pNext = nullptr;
- vk::ImageUsageFlags stencilUsage;
- };
- static_assert( sizeof( ImageStencilUsageCreateInfoEXT ) == sizeof( VkImageStencilUsageCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageStencilUsageCreateInfoEXT : public layout::ImageStencilUsageCreateInfoEXT
+ struct ImageStencilUsageCreateInfo
{
- VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfoEXT( vk::ImageUsageFlags stencilUsage_ = vk::ImageUsageFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::ImageStencilUsageCreateInfoEXT( stencilUsage_ )
+ VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT
+ : stencilUsage( stencilUsage_ )
{}
- ImageStencilUsageCreateInfoEXT( VkImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageStencilUsageCreateInfoEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) - offsetof( ImageStencilUsageCreateInfo, pNext ) );
+ return *this;
+ }
+
+ ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- ImageStencilUsageCreateInfoEXT& operator=( VkImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ ImageStencilUsageCreateInfo& operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageStencilUsageCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>(&rhs);
return *this;
}
- ImageStencilUsageCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- ImageStencilUsageCreateInfoEXT & setStencilUsage( vk::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+ ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
{
stencilUsage = stencilUsage_;
return *this;
}
- operator VkImageStencilUsageCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkImageStencilUsageCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
}
- operator VkImageStencilUsageCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkImageStencilUsageCreateInfoEXT*>( this );
+ return *reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
}
- bool operator==( ImageStencilUsageCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stencilUsage == rhs.stencilUsage );
}
- bool operator!=( ImageStencilUsageCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::ImageStencilUsageCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
};
- static_assert( sizeof( ImageStencilUsageCreateInfoEXT ) == sizeof( VkImageStencilUsageCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<ImageStencilUsageCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
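  // Editorial note: the same core-promotion rename as ImageFormatListCreateInfo
  // above -- VK_EXT_separate_stencil_usage was promoted to Vulkan 1.2, so the
  // EXT suffix is dropped from both the struct name and its StructureType
  // value. An illustrative use, e.g. a depth/stencil image whose stencil
  // aspect is only ever sampled:
  //
  //   vk::ImageStencilUsageCreateInfo stencilUsage(
  //       vk::ImageUsageFlagBits::eSampled );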
- namespace layout
+ struct ImageSwapchainCreateInfoKHR
{
- struct ImageSwapchainCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR() ) VULKAN_HPP_NOEXCEPT
- : swapchain( swapchain_ )
- {}
-
- ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>(this) = rhs;
- }
-
- ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
- const void* pNext = nullptr;
- vk::SwapchainKHR swapchain;
- };
- static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageSwapchainCreateInfoKHR : public layout::ImageSwapchainCreateInfoKHR
- {
- VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( vk::SwapchainKHR swapchain_ = vk::SwapchainKHR() ) VULKAN_HPP_NOEXCEPT
- : layout::ImageSwapchainCreateInfoKHR( swapchain_ )
+ VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT
+ : swapchain( swapchain_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) - offsetof( ImageSwapchainCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageSwapchainCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageSwapchainCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -40166,7 +37492,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageSwapchainCreateInfoKHR & setSwapchain( vk::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+ ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
@@ -40194,53 +37520,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageSwapchainCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
};
static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ImageViewASTCDecodeModeEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( vk::Format decodeMode_ = vk::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
- : decodeMode( decodeMode_ )
- {}
-
- ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>(this) = rhs;
- }
-
- ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
- const void* pNext = nullptr;
- vk::Format decodeMode;
- };
- static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageViewASTCDecodeModeEXT : public layout::ImageViewASTCDecodeModeEXT
+ struct ImageViewASTCDecodeModeEXT
{
- VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( vk::Format decodeMode_ = vk::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
- : layout::ImageViewASTCDecodeModeEXT( decodeMode_ )
+ VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
+ : decodeMode( decodeMode_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT & operator=( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) - offsetof( ImageViewASTCDecodeModeEXT, pNext ) );
+ return *this;
+ }
+
ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageViewASTCDecodeModeEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageViewASTCDecodeModeEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>(&rhs);
return *this;
}
@@ -40250,7 +37557,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageViewASTCDecodeModeEXT & setDecodeMode( vk::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
{
decodeMode = decodeMode_;
return *this;
@@ -40278,73 +37585,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageViewASTCDecodeModeEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ImageViewCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( vk::ImageViewCreateFlags flags_ = vk::ImageViewCreateFlags(),
- vk::Image image_ = vk::Image(),
- vk::ImageViewType viewType_ = vk::ImageViewType::e1D,
- vk::Format format_ = vk::Format::eUndefined,
- vk::ComponentMapping components_ = vk::ComponentMapping(),
- vk::ImageSubresourceRange subresourceRange_ = vk::ImageSubresourceRange() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , image( image_ )
- , viewType( viewType_ )
- , format( format_ )
- , components( components_ )
- , subresourceRange( subresourceRange_ )
- {}
-
- ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageViewCreateInfo*>(this) = rhs;
- }
-
- ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageViewCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageViewCreateInfo;
- const void* pNext = nullptr;
- vk::ImageViewCreateFlags flags;
- vk::Image image;
- vk::ImageViewType viewType;
- vk::Format format;
- vk::ComponentMapping components;
- vk::ImageSubresourceRange subresourceRange;
- };
- static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageViewCreateInfo : public layout::ImageViewCreateInfo
+ struct ImageViewCreateInfo
{
- VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( vk::ImageViewCreateFlags flags_ = vk::ImageViewCreateFlags(),
- vk::Image image_ = vk::Image(),
- vk::ImageViewType viewType_ = vk::ImageViewType::e1D,
- vk::Format format_ = vk::Format::eUndefined,
- vk::ComponentMapping components_ = vk::ComponentMapping(),
- vk::ImageSubresourceRange subresourceRange_ = vk::ImageSubresourceRange() ) VULKAN_HPP_NOEXCEPT
- : layout::ImageViewCreateInfo( flags_, image_, viewType_, format_, components_, subresourceRange_ )
+ VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
+ VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , image( image_ )
+ , viewType( viewType_ )
+ , format( format_ )
+ , components( components_ )
+ , subresourceRange( subresourceRange_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) - offsetof( ImageViewCreateInfo, pNext ) );
+ return *this;
+ }
+
ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageViewCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageViewCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>(&rhs);
return *this;
}
@@ -40354,37 +37632,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageViewCreateInfo & setFlags( vk::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ImageViewCreateInfo & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
- ImageViewCreateInfo & setViewType( vk::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
{
viewType = viewType_;
return *this;
}
- ImageViewCreateInfo & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- ImageViewCreateInfo & setComponents( vk::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
{
components = components_;
return *this;
}
- ImageViewCreateInfo & setSubresourceRange( vk::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
{
subresourceRange = subresourceRange_;
return *this;
@@ -40417,61 +37695,43 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageViewCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+ VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
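  // Editorial note: a minimal creation sketch, not part of the generated
  // header; `device`, `image` and `range` are assumed to exist, the format is
  // illustrative, and the throwing overload assumes exception-enabled builds:
  //
  //   vk::ImageViewCreateInfo viewInfo( {},                   // flags
  //                                     image,
  //                                     vk::ImageViewType::e2D,
  //                                     vk::Format::eB8G8R8A8Unorm,
  //                                     {},                   // identity ComponentMapping
  //                                     range );
  //   vk::ImageView view = device.createImageView( viewInfo );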
- namespace layout
+ struct ImageViewHandleInfoNVX
{
- struct ImageViewHandleInfoNVX
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( vk::ImageView imageView_ = vk::ImageView(),
- vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler,
- vk::Sampler sampler_ = vk::Sampler() ) VULKAN_HPP_NOEXCEPT
- : imageView( imageView_ )
- , descriptorType( descriptorType_ )
- , sampler( sampler_ )
- {}
-
- ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageViewHandleInfoNVX*>(this) = rhs;
- }
-
- ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageViewHandleInfoNVX*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageViewHandleInfoNVX;
- const void* pNext = nullptr;
- vk::ImageView imageView;
- vk::DescriptorType descriptorType;
- vk::Sampler sampler;
- };
- static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageViewHandleInfoNVX : public layout::ImageViewHandleInfoNVX
- {
- VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( vk::ImageView imageView_ = vk::ImageView(),
- vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler,
- vk::Sampler sampler_ = vk::Sampler() ) VULKAN_HPP_NOEXCEPT
- : layout::ImageViewHandleInfoNVX( imageView_, descriptorType_, sampler_ )
+ VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT
+ : imageView( imageView_ )
+ , descriptorType( descriptorType_ )
+ , sampler( sampler_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & operator=( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) - offsetof( ImageViewHandleInfoNVX, pNext ) );
+ return *this;
+ }
+
ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageViewHandleInfoNVX( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageViewHandleInfoNVX::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>(&rhs);
return *this;
}
@@ -40481,19 +37741,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageViewHandleInfoNVX & setImageView( vk::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
{
imageView = imageView_;
return *this;
}
- ImageViewHandleInfoNVX & setDescriptorType( vk::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
- ImageViewHandleInfoNVX & setSampler( vk::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
{
sampler = sampler_;
return *this;
@@ -40523,53 +37783,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageViewHandleInfoNVX::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ VULKAN_HPP_NAMESPACE::Sampler sampler = {};
};
static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct ImageViewUsageCreateInfo
{
- struct ImageViewUsageCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( vk::ImageUsageFlags usage_ = vk::ImageUsageFlags() ) VULKAN_HPP_NOEXCEPT
- : usage( usage_ )
- {}
-
- ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageViewUsageCreateInfo*>(this) = rhs;
- }
-
- ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImageViewUsageCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImageViewUsageCreateInfo;
- const void* pNext = nullptr;
- vk::ImageUsageFlags usage;
- };
- static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ImageViewUsageCreateInfo : public layout::ImageViewUsageCreateInfo
- {
- VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( vk::ImageUsageFlags usage_ = vk::ImageUsageFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::ImageViewUsageCreateInfo( usage_ )
+ VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT
+ : usage( usage_ )
{}
+ VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) - offsetof( ImageViewUsageCreateInfo, pNext ) );
+ return *this;
+ }
+
ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImageViewUsageCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImageViewUsageCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>(&rhs);
return *this;
}
@@ -40579,7 +37822,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImageViewUsageCreateInfo & setUsage( vk::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
@@ -40607,55 +37850,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImageViewUsageCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
};
static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- namespace layout
+ struct ImportAndroidHardwareBufferInfoANDROID
{
- struct ImportAndroidHardwareBufferInfoANDROID
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : buffer( buffer_ )
- {}
-
- ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>(this) = rhs;
- }
-
- ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
- const void* pNext = nullptr;
- struct AHardwareBuffer* buffer;
- };
- static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportAndroidHardwareBufferInfoANDROID : public layout::ImportAndroidHardwareBufferInfoANDROID
- {
- VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ImportAndroidHardwareBufferInfoANDROID( buffer_ )
+ VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : buffer( buffer_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID & operator=( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) - offsetof( ImportAndroidHardwareBufferInfoANDROID, pNext ) );
+ return *this;
+ }
+
ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportAndroidHardwareBufferInfoANDROID( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportAndroidHardwareBufferInfoANDROID::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>(&rhs);
return *this;
}
@@ -40693,66 +37917,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportAndroidHardwareBufferInfoANDROID::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
+ const void* pNext = {};
+ struct AHardwareBuffer* buffer = {};
};
static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- namespace layout
- {
- struct ImportFenceFdInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( vk::Fence fence_ = vk::Fence(),
- vk::FenceImportFlags flags_ = vk::FenceImportFlags(),
- vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
- int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
- , flags( flags_ )
- , handleType( handleType_ )
- , fd( fd_ )
- {}
-
- ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportFenceFdInfoKHR*>(this) = rhs;
- }
-
- ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportFenceFdInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportFenceFdInfoKHR;
- const void* pNext = nullptr;
- vk::Fence fence;
- vk::FenceImportFlags flags;
- vk::ExternalFenceHandleTypeFlagBits handleType;
- int fd;
- };
- static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportFenceFdInfoKHR : public layout::ImportFenceFdInfoKHR
+ struct ImportFenceFdInfoKHR
{
- VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( vk::Fence fence_ = vk::Fence(),
- vk::FenceImportFlags flags_ = vk::FenceImportFlags(),
- vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
- int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ImportFenceFdInfoKHR( fence_, flags_, handleType_, fd_ )
+ VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+ int fd_ = {} ) VULKAN_HPP_NOEXCEPT
+ : fence( fence_ )
+ , flags( flags_ )
+ , handleType( handleType_ )
+ , fd( fd_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) - offsetof( ImportFenceFdInfoKHR, pNext ) );
+ return *this;
+ }
+
ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportFenceFdInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportFenceFdInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>(&rhs);
return *this;
}
@@ -40762,19 +37961,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImportFenceFdInfoKHR & setFence( vk::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
- ImportFenceFdInfoKHR & setFlags( vk::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ImportFenceFdInfoKHR & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40802,7 +38001,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pNext == rhs.pNext )
&& ( fence == rhs.fence )
&& ( flags == rhs.flags )
- && vk::operator==( handleType, rhs.handleType )
+ && ( handleType == rhs.handleType )
&& ( fd == rhs.fd );
}
@@ -40811,71 +38010,47 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportFenceFdInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
};
static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
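  // A minimal usage sketch for this struct (assumptions: VK_KHR_external_fence_fd
  // is enabled, `device` and `fence` are existing handles, `fd` came from an
  // exporter, and vkImportFenceFdKHR is reachable through your dispatch loader):
  void importFenceFd( vk::Device device, vk::Fence fence, int fd )
  {
    auto info = vk::ImportFenceFdInfoKHR()
                  .setFence( fence )
                  .setHandleType( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd )
                  .setFd( fd );  // on success the driver takes ownership of fd
    device.importFenceFdKHR( info );
  }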
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
+ struct ImportFenceWin32HandleInfoKHR
{
- struct ImportFenceWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( vk::Fence fence_ = vk::Fence(),
- vk::FenceImportFlags flags_ = vk::FenceImportFlags(),
- vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : fence( fence_ )
- , flags( flags_ )
- , handleType( handleType_ )
- , handle( handle_ )
- , name( name_ )
- {}
-
- ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>(this) = rhs;
- }
-
- ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
- const void* pNext = nullptr;
- vk::Fence fence;
- vk::FenceImportFlags flags;
- vk::ExternalFenceHandleTypeFlagBits handleType;
- HANDLE handle;
- LPCWSTR name;
- };
- static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportFenceWin32HandleInfoKHR : public layout::ImportFenceWin32HandleInfoKHR
- {
- VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( vk::Fence fence_ = vk::Fence(),
- vk::FenceImportFlags flags_ = vk::FenceImportFlags(),
- vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ImportFenceWin32HandleInfoKHR( fence_, flags_, handleType_, handle_, name_ )
+ VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
+ HANDLE handle_ = {},
+ LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
+ : fence( fence_ )
+ , flags( flags_ )
+ , handleType( handleType_ )
+ , handle( handle_ )
+ , name( name_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) - offsetof( ImportFenceWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportFenceWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportFenceWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -40885,19 +38060,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImportFenceWin32HandleInfoKHR & setFence( vk::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
{
fence = fence_;
return *this;
}
- ImportFenceWin32HandleInfoKHR & setFlags( vk::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ImportFenceWin32HandleInfoKHR & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -40931,7 +38106,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pNext == rhs.pNext )
&& ( fence == rhs.fence )
&& ( flags == rhs.flags )
- && vk::operator==( handleType, rhs.handleType )
+ && ( handleType == rhs.handleType )
&& ( handle == rhs.handle )
&& ( name == rhs.name );
}
@@ -40941,58 +38116,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportFenceWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Fence fence = {};
+ VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
};
static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- namespace layout
+ struct ImportMemoryFdInfoKHR
{
- struct ImportMemoryFdInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- , fd( fd_ )
- {}
-
- ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportMemoryFdInfoKHR*>(this) = rhs;
- }
-
- ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportMemoryFdInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlagBits handleType;
- int fd;
- };
- static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportMemoryFdInfoKHR : public layout::ImportMemoryFdInfoKHR
- {
- VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ImportMemoryFdInfoKHR( handleType_, fd_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ int fd_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
+ , fd( fd_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) - offsetof( ImportMemoryFdInfoKHR, pNext ) );
+ return *this;
+ }
+
ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportMemoryFdInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportMemoryFdInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>(&rhs);
return *this;
}
@@ -41002,7 +38160,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImportMemoryFdInfoKHR & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -41028,7 +38186,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( handleType, rhs.handleType )
+ && ( handleType == rhs.handleType )
&& ( fd == rhs.fd );
}
@@ -41037,57 +38195,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportMemoryFdInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
};
static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
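  // Sketch: this struct rides on vk::MemoryAllocateInfo's pNext chain to import
  // an exported opaque fd (assumptions: VK_KHR_external_memory_fd is enabled and
  // `size`/`memoryTypeIndex` match the exporting allocation):
  vk::DeviceMemory importMemoryFd( vk::Device device, int fd, vk::DeviceSize size, uint32_t memoryTypeIndex )
  {
    vk::ImportMemoryFdInfoKHR importInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd );
    vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
    allocInfo.setPNext( &importInfo );  // extension struct chained, core struct passed
    return device.allocateMemory( allocInfo );
  }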
- namespace layout
+ struct ImportMemoryHostPointerInfoEXT
{
- struct ImportMemoryHostPointerInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- void* pHostPointer_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- , pHostPointer( pHostPointer_ )
- {}
-
- ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>(this) = rhs;
- }
-
- ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlagBits handleType;
- void* pHostPointer;
- };
- static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportMemoryHostPointerInfoEXT : public layout::ImportMemoryHostPointerInfoEXT
- {
- VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- void* pHostPointer_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ImportMemoryHostPointerInfoEXT( handleType_, pHostPointer_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ void* pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
+ , pHostPointer( pHostPointer_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) - offsetof( ImportMemoryHostPointerInfoEXT, pNext ) );
+ return *this;
+ }
+
ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportMemoryHostPointerInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportMemoryHostPointerInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>(&rhs);
return *this;
}
@@ -41097,7 +38235,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImportMemoryHostPointerInfoEXT & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -41123,7 +38261,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( handleType, rhs.handleType )
+ && ( handleType == rhs.handleType )
&& ( pHostPointer == rhs.pHostPointer );
}
@@ -41132,63 +38270,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportMemoryHostPointerInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ void* pHostPointer = {};
};
static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
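  // Sketch for VK_EXT_external_memory_host (assumptions: `ptr` and `size` respect
  // minImportedHostPointerAlignment, and `memoryTypeIndex` was validated against
  // vk::Device::getMemoryHostPointerPropertiesEXT for this pointer):
  vk::DeviceMemory importHostPointer( vk::Device device, void* ptr, vk::DeviceSize size, uint32_t memoryTypeIndex )
  {
    vk::ImportMemoryHostPointerInfoEXT importInfo( vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, ptr );
    vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
    allocInfo.setPNext( &importInfo );
    return device.allocateMemory( allocInfo );
  }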
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct ImportMemoryWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- , handle( handle_ )
- , name( name_ )
- {}
-
- ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>(this) = rhs;
- }
-
- ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlagBits handleType;
- HANDLE handle;
- LPCWSTR name;
- };
- static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportMemoryWin32HandleInfoKHR : public layout::ImportMemoryWin32HandleInfoKHR
+ struct ImportMemoryWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ImportMemoryWin32HandleInfoKHR( handleType_, handle_, name_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
+ HANDLE handle_ = {},
+ LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
+ , handle( handle_ )
+ , name( name_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) - offsetof( ImportMemoryWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportMemoryWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportMemoryWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -41198,7 +38314,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImportMemoryWin32HandleInfoKHR & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -41230,7 +38346,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( handleType, rhs.handleType )
+ && ( handleType == rhs.handleType )
&& ( handle == rhs.handle )
&& ( name == rhs.name );
}
@@ -41240,8 +38356,12 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportMemoryWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
};
static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
@@ -41249,51 +38369,28 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct ImportMemoryWin32HandleInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleType_ = vk::ExternalMemoryHandleTypeFlagsNV(),
- HANDLE handle_ = 0 ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- , handle( handle_ )
- {}
-
- ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>(this) = rhs;
- }
-
- ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlagsNV handleType;
- HANDLE handle;
- };
- static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportMemoryWin32HandleInfoNV : public layout::ImportMemoryWin32HandleInfoNV
+ struct ImportMemoryWin32HandleInfoNV
{
- VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( vk::ExternalMemoryHandleTypeFlagsNV handleType_ = vk::ExternalMemoryHandleTypeFlagsNV(),
- HANDLE handle_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ImportMemoryWin32HandleInfoNV( handleType_, handle_ )
+ VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {},
+ HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
+ , handle( handle_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV & operator=( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) - offsetof( ImportMemoryWin32HandleInfoNV, pNext ) );
+ return *this;
+ }
+
ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportMemoryWin32HandleInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportMemoryWin32HandleInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>(&rhs);
return *this;
}
@@ -41303,7 +38400,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImportMemoryWin32HandleInfoNV & setHandleType( vk::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
+ ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -41338,66 +38435,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportMemoryWin32HandleInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
+ HANDLE handle = {};
};
static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- namespace layout
- {
- struct ImportSemaphoreFdInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::SemaphoreImportFlags flags_ = vk::SemaphoreImportFlags(),
- vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , flags( flags_ )
- , handleType( handleType_ )
- , fd( fd_ )
- {}
-
- ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>(this) = rhs;
- }
-
- ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
- const void* pNext = nullptr;
- vk::Semaphore semaphore;
- vk::SemaphoreImportFlags flags;
- vk::ExternalSemaphoreHandleTypeFlagBits handleType;
- int fd;
- };
- static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportSemaphoreFdInfoKHR : public layout::ImportSemaphoreFdInfoKHR
+ struct ImportSemaphoreFdInfoKHR
{
- VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::SemaphoreImportFlags flags_ = vk::SemaphoreImportFlags(),
- vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- int fd_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ImportSemaphoreFdInfoKHR( semaphore_, flags_, handleType_, fd_ )
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ int fd_ = {} ) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ )
+ , flags( flags_ )
+ , handleType( handleType_ )
+ , fd( fd_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) - offsetof( ImportSemaphoreFdInfoKHR, pNext ) );
+ return *this;
+ }
+
ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportSemaphoreFdInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportSemaphoreFdInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>(&rhs);
return *this;
}
@@ -41407,19 +38480,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImportSemaphoreFdInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- ImportSemaphoreFdInfoKHR & setFlags( vk::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ImportSemaphoreFdInfoKHR & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -41447,7 +38520,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( flags == rhs.flags )
- && vk::operator==( handleType, rhs.handleType )
+ && ( handleType == rhs.handleType )
&& ( fd == rhs.fd );
}
@@ -41456,71 +38529,47 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportSemaphoreFdInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ int fd = {};
};
static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
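  // Sketch: importing a sync fd as a semaphore payload. For eSyncFd the spec
  // requires the eTemporary import flag (handle assumptions as in the fence sketch):
  void importSemaphoreSyncFd( vk::Device device, vk::Semaphore semaphore, int syncFd )
  {
    vk::ImportSemaphoreFdInfoKHR info( semaphore,
                                       vk::SemaphoreImportFlagBits::eTemporary,
                                       vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd,
                                       syncFd );
    device.importSemaphoreFdKHR( info );
  }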
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct ImportSemaphoreWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::SemaphoreImportFlags flags_ = vk::SemaphoreImportFlags(),
- vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , flags( flags_ )
- , handleType( handleType_ )
- , handle( handle_ )
- , name( name_ )
- {}
-
- ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>(this) = rhs;
- }
-
- ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
- const void* pNext = nullptr;
- vk::Semaphore semaphore;
- vk::SemaphoreImportFlags flags;
- vk::ExternalSemaphoreHandleTypeFlagBits handleType;
- HANDLE handle;
- LPCWSTR name;
- };
- static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct ImportSemaphoreWin32HandleInfoKHR : public layout::ImportSemaphoreWin32HandleInfoKHR
+ struct ImportSemaphoreWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::SemaphoreImportFlags flags_ = vk::SemaphoreImportFlags(),
- vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
- HANDLE handle_ = 0,
- LPCWSTR name_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ImportSemaphoreWin32HandleInfoKHR( semaphore_, flags_, handleType_, handle_, name_ )
+ VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+ HANDLE handle_ = {},
+ LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ )
+ , flags( flags_ )
+ , handleType( handleType_ )
+ , handle( handle_ )
+ , name( name_ )
{}
+ VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) - offsetof( ImportSemaphoreWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ImportSemaphoreWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ImportSemaphoreWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -41530,19 +38579,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setFlags( vk::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ImportSemaphoreWin32HandleInfoKHR & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -41576,7 +38625,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
&& ( flags == rhs.flags )
- && vk::operator==( handleType, rhs.handleType )
+ && ( handleType == rhs.handleType )
&& ( handle == rhs.handle )
&& ( name == rhs.name );
}
@@ -41586,8 +38635,14 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ImportSemaphoreWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+ HANDLE handle = {};
+ LPCWSTR name = {};
};
static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
@@ -41595,10 +38650,10 @@ namespace VULKAN_HPP_NAMESPACE
struct IndirectCommandsLayoutTokenNVX
{
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX tokenType_ = vk::IndirectCommandsTokenTypeNVX::ePipeline,
- uint32_t bindingUnit_ = 0,
- uint32_t dynamicCount_ = 0,
- uint32_t divisor_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline,
+ uint32_t bindingUnit_ = {},
+ uint32_t dynamicCount_ = {},
+ uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
: tokenType( tokenType_ )
, bindingUnit( bindingUnit_ )
, dynamicCount( dynamicCount_ )
@@ -41607,16 +38662,16 @@ namespace VULKAN_HPP_NAMESPACE
IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkIndirectCommandsLayoutTokenNVX*>(this) = rhs;
+ *this = rhs;
}
IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkIndirectCommandsLayoutTokenNVX*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX const *>(&rhs);
return *this;
}
- IndirectCommandsLayoutTokenNVX & setTokenType( vk::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
{
tokenType = tokenType_;
return *this;
@@ -41664,67 +38719,40 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::IndirectCommandsTokenTypeNVX tokenType;
- uint32_t bindingUnit;
- uint32_t dynamicCount;
- uint32_t divisor;
+ VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline;
+ uint32_t bindingUnit = {};
+ uint32_t dynamicCount = {};
+ uint32_t divisor = {};
};
static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNVX>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct IndirectCommandsLayoutCreateInfoNVX
{
- struct IndirectCommandsLayoutCreateInfoNVX
- {
- protected:
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics,
- vk::IndirectCommandsLayoutUsageFlagsNVX flags_ = vk::IndirectCommandsLayoutUsageFlagsNVX(),
- uint32_t tokenCount_ = 0,
- const vk::IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pipelineBindPoint( pipelineBindPoint_ )
- , flags( flags_ )
- , tokenCount( tokenCount_ )
- , pTokens( pTokens_ )
- {}
-
- IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX*>(this) = rhs;
- }
-
- IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNVX*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX;
- const void* pNext = nullptr;
- vk::PipelineBindPoint pipelineBindPoint;
- vk::IndirectCommandsLayoutUsageFlagsNVX flags;
- uint32_t tokenCount;
- const vk::IndirectCommandsLayoutTokenNVX* pTokens;
- };
- static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "layout struct and wrapper have different size!" );
- }
-
- struct IndirectCommandsLayoutCreateInfoNVX : public layout::IndirectCommandsLayoutCreateInfoNVX
- {
- VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics,
- vk::IndirectCommandsLayoutUsageFlagsNVX flags_ = vk::IndirectCommandsLayoutUsageFlagsNVX(),
- uint32_t tokenCount_ = 0,
- const vk::IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::IndirectCommandsLayoutCreateInfoNVX( pipelineBindPoint_, flags_, tokenCount_, pTokens_ )
+ VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ = {},
+ uint32_t tokenCount_ = {},
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pipelineBindPoint( pipelineBindPoint_ )
+ , flags( flags_ )
+ , tokenCount( tokenCount_ )
+ , pTokens( pTokens_ )
{}
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX & operator=( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX ) - offsetof( IndirectCommandsLayoutCreateInfoNVX, pNext ) );
+ return *this;
+ }
+
IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::IndirectCommandsLayoutCreateInfoNVX( rhs )
- {}
+ {
+ *this = rhs;
+ }
IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::IndirectCommandsLayoutCreateInfoNVX::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX const *>(&rhs);
return *this;
}
@@ -41734,13 +38762,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX & setPipelineBindPoint( vk::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutCreateInfoNVX & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX & setFlags( vk::IndirectCommandsLayoutUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutCreateInfoNVX & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -41752,7 +38780,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- IndirectCommandsLayoutCreateInfoNVX & setPTokens( const vk::IndirectCommandsLayoutTokenNVX* pTokens_ ) VULKAN_HPP_NOEXCEPT
+ IndirectCommandsLayoutCreateInfoNVX & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ ) VULKAN_HPP_NOEXCEPT
{
pTokens = pTokens_;
return *this;
@@ -41783,53 +38811,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::IndirectCommandsLayoutCreateInfoNVX::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags = {};
+ uint32_t tokenCount = {};
+ const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens = {};
};
static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<IndirectCommandsLayoutCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct InitializePerformanceApiInfoINTEL
{
- struct InitializePerformanceApiInfoINTEL
- {
- protected:
- VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pUserData( pUserData_ )
- {}
-
- InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>(this) = rhs;
- }
-
- InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
- const void* pNext = nullptr;
- void* pUserData;
- };
- static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "layout struct and wrapper have different size!" );
- }
-
- struct InitializePerformanceApiInfoINTEL : public layout::InitializePerformanceApiInfoINTEL
- {
- VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::InitializePerformanceApiInfoINTEL( pUserData_ )
+ VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pUserData( pUserData_ )
{}
+ VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) - offsetof( InitializePerformanceApiInfoINTEL, pNext ) );
+ return *this;
+ }
+
InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::InitializePerformanceApiInfoINTEL( rhs )
- {}
+ {
+ *this = rhs;
+ }
InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::InitializePerformanceApiInfoINTEL::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>(&rhs);
return *this;
}
@@ -41867,17 +38879,19 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::InitializePerformanceApiInfoINTEL::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
+ const void* pNext = {};
+ void* pUserData = {};
};
static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
struct InputAttachmentAspectReference
{
- VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = 0,
- uint32_t inputAttachmentIndex_ = 0,
- vk::ImageAspectFlags aspectMask_ = vk::ImageAspectFlags() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {},
+ uint32_t inputAttachmentIndex_ = {},
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
: subpass( subpass_ )
, inputAttachmentIndex( inputAttachmentIndex_ )
, aspectMask( aspectMask_ )
@@ -41885,12 +38899,12 @@ namespace VULKAN_HPP_NAMESPACE
InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkInputAttachmentAspectReference*>(this) = rhs;
+ *this = rhs;
}
InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkInputAttachmentAspectReference*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>(&rhs);
return *this;
}
@@ -41906,7 +38920,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- InputAttachmentAspectReference & setAspectMask( vk::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+ InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
@@ -41935,74 +38949,43 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t subpass;
- uint32_t inputAttachmentIndex;
- vk::ImageAspectFlags aspectMask;
+ uint32_t subpass = {};
+ uint32_t inputAttachmentIndex = {};
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
};
static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
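  // Sketch: this struct is consumed via vk::RenderPassInputAttachmentAspectCreateInfo
  // on a render pass's pNext chain; the indices below are illustrative, and the
  // statics keep the chained pointers alive until creation:
  void describeInputAspects( vk::RenderPassCreateInfo & renderPassCreateInfo )
  {
    static vk::InputAttachmentAspectReference aspectRef( 0 /*subpass*/, 0 /*inputAttachmentIndex*/, vk::ImageAspectFlagBits::eColor );
    static vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );
    renderPassCreateInfo.setPNext( &aspectInfo );
  }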
- namespace layout
+ struct InstanceCreateInfo
{
- struct InstanceCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR InstanceCreateInfo( vk::InstanceCreateFlags flags_ = vk::InstanceCreateFlags(),
- const vk::ApplicationInfo* pApplicationInfo_ = nullptr,
- uint32_t enabledLayerCount_ = 0,
- const char* const* ppEnabledLayerNames_ = nullptr,
- uint32_t enabledExtensionCount_ = 0,
- const char* const* ppEnabledExtensionNames_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pApplicationInfo( pApplicationInfo_ )
- , enabledLayerCount( enabledLayerCount_ )
- , ppEnabledLayerNames( ppEnabledLayerNames_ )
- , enabledExtensionCount( enabledExtensionCount_ )
- , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
- {}
-
- InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkInstanceCreateInfo*>(this) = rhs;
- }
-
- InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkInstanceCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eInstanceCreateInfo;
- const void* pNext = nullptr;
- vk::InstanceCreateFlags flags;
- const vk::ApplicationInfo* pApplicationInfo;
- uint32_t enabledLayerCount;
- const char* const* ppEnabledLayerNames;
- uint32_t enabledExtensionCount;
- const char* const* ppEnabledExtensionNames;
- };
- static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct InstanceCreateInfo : public layout::InstanceCreateInfo
- {
- VULKAN_HPP_CONSTEXPR InstanceCreateInfo( vk::InstanceCreateFlags flags_ = vk::InstanceCreateFlags(),
- const vk::ApplicationInfo* pApplicationInfo_ = nullptr,
- uint32_t enabledLayerCount_ = 0,
- const char* const* ppEnabledLayerNames_ = nullptr,
- uint32_t enabledExtensionCount_ = 0,
- const char* const* ppEnabledExtensionNames_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::InstanceCreateInfo( flags_, pApplicationInfo_, enabledLayerCount_, ppEnabledLayerNames_, enabledExtensionCount_, ppEnabledExtensionNames_ )
+ VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {},
+ const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {},
+ uint32_t enabledLayerCount_ = {},
+ const char* const* ppEnabledLayerNames_ = {},
+ uint32_t enabledExtensionCount_ = {},
+ const char* const* ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , pApplicationInfo( pApplicationInfo_ )
+ , enabledLayerCount( enabledLayerCount_ )
+ , ppEnabledLayerNames( ppEnabledLayerNames_ )
+ , enabledExtensionCount( enabledExtensionCount_ )
+ , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
{}
+ VULKAN_HPP_NAMESPACE::InstanceCreateInfo & operator=( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) - offsetof( InstanceCreateInfo, pNext ) );
+ return *this;
+ }
+
InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::InstanceCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::InstanceCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>(&rhs);
return *this;
}
@@ -42012,13 +38995,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- InstanceCreateInfo & setFlags( vk::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- InstanceCreateInfo & setPApplicationInfo( const vk::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
+ InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pApplicationInfo = pApplicationInfo_;
return *this;
@@ -42075,25 +39058,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::InstanceCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
+ const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {};
+ uint32_t enabledLayerCount = {};
+ const char* const* ppEnabledLayerNames = {};
+ uint32_t enabledExtensionCount = {};
+ const char* const* ppEnabledExtensionNames = {};
};
static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
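  // Sketch: the canonical consumer of this struct (the validation layer name is
  // an assumption about what the local loader provides):
  vk::Instance makeInstance()
  {
    static const char* layers[] = { "VK_LAYER_KHRONOS_validation" };
    vk::ApplicationInfo appInfo( "demo", 1, "no-engine", 1, VK_API_VERSION_1_1 );
    vk::InstanceCreateInfo createInfo( {}, &appInfo, 1, layers, 0, nullptr );
    return vk::createInstance( createInfo );  // throws vk::SystemError on failure
  }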
struct LayerProperties
{
- LayerProperties() VULKAN_HPP_NOEXCEPT
- {}
+ LayerProperties( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = {},
+ uint32_t specVersion_ = {},
+ uint32_t implementationVersion_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {} ) VULKAN_HPP_NOEXCEPT
+ : layerName{}
+ , specVersion( specVersion_ )
+ , implementationVersion( implementationVersion_ )
+ , description{}
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( layerName, layerName_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+ }
LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkLayerProperties*>(this) = rhs;
+ *this = rhs;
}
LayerProperties& operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkLayerProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>(&rhs);
return *this;
}
@@ -42121,61 +39121,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- char layerName[VK_MAX_EXTENSION_NAME_SIZE];
- uint32_t specVersion;
- uint32_t implementationVersion;
- char description[VK_MAX_DESCRIPTION_SIZE];
+ char layerName[VK_MAX_EXTENSION_NAME_SIZE] = {};
+ uint32_t specVersion = {};
+ uint32_t implementationVersion = {};
+ char description[VK_MAX_DESCRIPTION_SIZE] = {};
};
static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!" );
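  // Sketch: the char arrays above are filled in by the implementation, so
  // enumeration is all a caller needs (assumes <cstdio>; throws on error):
  void listLayers()
  {
    for ( vk::LayerProperties const & lp : vk::enumerateInstanceLayerProperties() )
      printf( "%s (spec %u, impl %u)\n", lp.layerName, lp.specVersion, lp.implementationVersion );
  }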
#ifdef VK_USE_PLATFORM_MACOS_MVK
- namespace layout
- {
- struct MacOSSurfaceCreateInfoMVK
- {
- protected:
- VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( vk::MacOSSurfaceCreateFlagsMVK flags_ = vk::MacOSSurfaceCreateFlagsMVK(),
- const void* pView_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pView( pView_ )
- {}
-
- MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>(this) = rhs;
- }
-
- MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
- const void* pNext = nullptr;
- vk::MacOSSurfaceCreateFlagsMVK flags;
- const void* pView;
- };
- static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "layout struct and wrapper have different size!" );
- }
-
- struct MacOSSurfaceCreateInfoMVK : public layout::MacOSSurfaceCreateInfoMVK
+ struct MacOSSurfaceCreateInfoMVK
{
- VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( vk::MacOSSurfaceCreateFlagsMVK flags_ = vk::MacOSSurfaceCreateFlagsMVK(),
- const void* pView_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::MacOSSurfaceCreateInfoMVK( flags_, pView_ )
+ VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {},
+ const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , pView( pView_ )
{}
+ VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & operator=( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) - offsetof( MacOSSurfaceCreateInfoMVK, pNext ) );
+ return *this;
+ }
+
MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MacOSSurfaceCreateInfoMVK( rhs )
- {}
+ {
+ *this = rhs;
+ }
MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MacOSSurfaceCreateInfoMVK::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>(&rhs);
return *this;
}
@@ -42185,7 +39162,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MacOSSurfaceCreateInfoMVK & setFlags( vk::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+ MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -42220,62 +39197,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MacOSSurfaceCreateInfoMVK::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
+ const void* pView = {};
};
static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
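  // Sketch for VK_MVK_macos_surface (assumptions: `instance` exists and `nsView`
  // is an NSView* backed by a CAMetalLayer, as MoltenVK requires):
  vk::SurfaceKHR makeMacOSSurface( vk::Instance instance, const void* nsView )
  {
    vk::MacOSSurfaceCreateInfoMVK surfaceInfo( {}, nsView );
    return instance.createMacOSSurfaceMVK( surfaceInfo );
  }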
- namespace layout
+ struct MappedMemoryRange
{
- struct MappedMemoryRange
- {
- protected:
- VULKAN_HPP_CONSTEXPR MappedMemoryRange( vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize offset_ = 0,
- vk::DeviceSize size_ = 0 ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- , offset( offset_ )
- , size( size_ )
- {}
-
- MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMappedMemoryRange*>(this) = rhs;
- }
-
- MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMappedMemoryRange*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMappedMemoryRange;
- const void* pNext = nullptr;
- vk::DeviceMemory memory;
- vk::DeviceSize offset;
- vk::DeviceSize size;
- };
- static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "layout struct and wrapper have different size!" );
- }
-
- struct MappedMemoryRange : public layout::MappedMemoryRange
- {
- VULKAN_HPP_CONSTEXPR MappedMemoryRange( vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::DeviceSize offset_ = 0,
- vk::DeviceSize size_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::MappedMemoryRange( memory_, offset_, size_ )
+ VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ )
+ , offset( offset_ )
+ , size( size_ )
{}
+ VULKAN_HPP_NAMESPACE::MappedMemoryRange & operator=( VULKAN_HPP_NAMESPACE::MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) - offsetof( MappedMemoryRange, pNext ) );
+ return *this;
+ }
+
MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MappedMemoryRange( rhs )
- {}
+ {
+ *this = rhs;
+ }
MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MappedMemoryRange::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>(&rhs);
return *this;
}
@@ -42285,19 +39240,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MappedMemoryRange & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- MappedMemoryRange & setOffset( vk::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+ MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- MappedMemoryRange & setSize( vk::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+ MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
@@ -42327,57 +39282,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MappedMemoryRange::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
};
static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
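
The setter chain is untouched by the refactor, so typical use reads as before. A usage sketch, assuming a valid vk::Device named device and a mapped vk::DeviceMemory named memory:

    vk::MappedMemoryRange range = vk::MappedMemoryRange()
                                      .setMemory( memory )
                                      .setOffset( 0 )
                                      .setSize( VK_WHOLE_SIZE );
    device.flushMappedMemoryRanges( range ); // ArrayProxy accepts a single element
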
- namespace layout
+ struct MemoryAllocateFlagsInfo
{
- struct MemoryAllocateFlagsInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( vk::MemoryAllocateFlags flags_ = vk::MemoryAllocateFlags(),
- uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , deviceMask( deviceMask_ )
- {}
-
- MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryAllocateFlagsInfo*>(this) = rhs;
- }
-
- MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryAllocateFlagsInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
- const void* pNext = nullptr;
- vk::MemoryAllocateFlags flags;
- uint32_t deviceMask;
- };
- static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryAllocateFlagsInfo : public layout::MemoryAllocateFlagsInfo
- {
- VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( vk::MemoryAllocateFlags flags_ = vk::MemoryAllocateFlags(),
- uint32_t deviceMask_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryAllocateFlagsInfo( flags_, deviceMask_ )
+ VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
+ uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , deviceMask( deviceMask_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) - offsetof( MemoryAllocateFlagsInfo, pNext ) );
+ return *this;
+ }
+
MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryAllocateFlagsInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryAllocateFlagsInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>(&rhs);
return *this;
}
@@ -42387,7 +39323,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MemoryAllocateFlagsInfo & setFlags( vk::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -42422,57 +39358,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryAllocateFlagsInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
+ uint32_t deviceMask = {};
};
static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct MemoryAllocateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( vk::DeviceSize allocationSize_ = 0,
- uint32_t memoryTypeIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : allocationSize( allocationSize_ )
- , memoryTypeIndex( memoryTypeIndex_ )
- {}
-
- MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryAllocateInfo*>(this) = rhs;
- }
-
- MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryAllocateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryAllocateInfo;
- const void* pNext = nullptr;
- vk::DeviceSize allocationSize;
- uint32_t memoryTypeIndex;
- };
- static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryAllocateInfo : public layout::MemoryAllocateInfo
+ struct MemoryAllocateInfo
{
- VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( vk::DeviceSize allocationSize_ = 0,
- uint32_t memoryTypeIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryAllocateInfo( allocationSize_, memoryTypeIndex_ )
+ VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
+ uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : allocationSize( allocationSize_ )
+ , memoryTypeIndex( memoryTypeIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) - offsetof( MemoryAllocateInfo, pNext ) );
+ return *this;
+ }
+
MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryAllocateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryAllocateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>(&rhs);
return *this;
}
@@ -42482,7 +39398,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MemoryAllocateInfo & setAllocationSize( vk::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
+ MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
{
allocationSize = allocationSize_;
return *this;
@@ -42517,57 +39433,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryAllocateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
+ uint32_t memoryTypeIndex = {};
};
static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
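
Because the constructor arguments and the conversion operators are unchanged, both the C++ and the C call paths still work. A sketch, assuming device plus size and typeIndex computed elsewhere:

    vk::MemoryAllocateInfo allocInfo( size, typeIndex );
    vk::DeviceMemory memory = device.allocateMemory( allocInfo );

    // Dropping down to the C API goes through the conversion operator,
    // which is a reinterpret_cast made valid by the static_asserts above.
    VkMemoryAllocateInfo const & cInfo = allocInfo;
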
- namespace layout
- {
- struct MemoryBarrier
- {
- protected:
- VULKAN_HPP_CONSTEXPR MemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags() ) VULKAN_HPP_NOEXCEPT
- : srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- {}
-
- MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryBarrier*>(this) = rhs;
- }
-
- MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryBarrier*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryBarrier;
- const void* pNext = nullptr;
- vk::AccessFlags srcAccessMask;
- vk::AccessFlags dstAccessMask;
- };
- static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryBarrier : public layout::MemoryBarrier
+ struct MemoryBarrier
{
- VULKAN_HPP_CONSTEXPR MemoryBarrier( vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryBarrier( srcAccessMask_, dstAccessMask_ )
+ VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT
+ : srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryBarrier & operator=( VULKAN_HPP_NAMESPACE::MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) - offsetof( MemoryBarrier, pNext ) );
+ return *this;
+ }
+
MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryBarrier( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryBarrier::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>(&rhs);
return *this;
}
@@ -42577,13 +39473,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MemoryBarrier & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- MemoryBarrier & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
@@ -42612,57 +39508,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryBarrier::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
};
static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
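
A usage sketch for the flattened MemoryBarrier, assuming a command buffer cmd in the recording state:

    vk::MemoryBarrier barrier( vk::AccessFlagBits::eShaderWrite,   // srcAccessMask
                               vk::AccessFlagBits::eShaderRead );  // dstAccessMask
    cmd.pipelineBarrier( vk::PipelineStageFlagBits::eComputeShader,
                         vk::PipelineStageFlagBits::eComputeShader,
                         {},        // no dependency flags
                         barrier,   // global memory barriers
                         nullptr,   // no buffer memory barriers
                         nullptr ); // no image memory barriers
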
- namespace layout
+ struct MemoryDedicatedAllocateInfo
{
- struct MemoryDedicatedAllocateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( vk::Image image_ = vk::Image(),
- vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
- : image( image_ )
- , buffer( buffer_ )
- {}
-
- MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>(this) = rhs;
- }
-
- MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
- const void* pNext = nullptr;
- vk::Image image;
- vk::Buffer buffer;
- };
- static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryDedicatedAllocateInfo : public layout::MemoryDedicatedAllocateInfo
- {
- VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( vk::Image image_ = vk::Image(),
- vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryDedicatedAllocateInfo( image_, buffer_ )
+ VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : image( image_ )
+ , buffer( buffer_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) - offsetof( MemoryDedicatedAllocateInfo, pNext ) );
+ return *this;
+ }
+
MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryDedicatedAllocateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryDedicatedAllocateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>(&rhs);
return *this;
}
@@ -42672,13 +39548,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MemoryDedicatedAllocateInfo & setImage( vk::Image image_ ) VULKAN_HPP_NOEXCEPT
+ MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
- MemoryDedicatedAllocateInfo & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
@@ -42707,53 +39583,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryDedicatedAllocateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Image image = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct MemoryDedicatedRequirements
{
- struct MemoryDedicatedRequirements
- {
- protected:
- MemoryDedicatedRequirements() VULKAN_HPP_NOEXCEPT
- {}
-
- MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryDedicatedRequirements*>(this) = rhs;
- }
-
- MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryDedicatedRequirements*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryDedicatedRequirements;
- void* pNext = nullptr;
- vk::Bool32 prefersDedicatedAllocation;
- vk::Bool32 requiresDedicatedAllocation;
- };
- static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryDedicatedRequirements : public layout::MemoryDedicatedRequirements
- {
- MemoryDedicatedRequirements() VULKAN_HPP_NOEXCEPT
- : layout::MemoryDedicatedRequirements()
+ MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
+ : prefersDedicatedAllocation( prefersDedicatedAllocation_ )
+ , requiresDedicatedAllocation( requiresDedicatedAllocation_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements & operator=( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) - offsetof( MemoryDedicatedRequirements, pNext ) );
+ return *this;
+ }
+
MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryDedicatedRequirements( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryDedicatedRequirements::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>(&rhs);
return *this;
}
@@ -42780,52 +39640,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryDedicatedRequirements::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
+ VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
};
static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
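
MemoryDedicatedRequirements is an output-only struct (mutable void* pNext, no setters), filled in by the implementation when chained behind MemoryRequirements2. A query sketch, assuming device and a vk::Image named image, and using the StructureChain helper to build the pNext chain:

    auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2,
                                                    vk::MemoryDedicatedRequirements>(
        vk::ImageMemoryRequirementsInfo2( image ) );
    bool wantsDedicated =
        chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
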
- namespace layout
+ struct MemoryFdPropertiesKHR
{
- struct MemoryFdPropertiesKHR
- {
- protected:
- MemoryFdPropertiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryFdPropertiesKHR*>(this) = rhs;
- }
-
- MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryFdPropertiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
- void* pNext = nullptr;
- uint32_t memoryTypeBits;
- };
- static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryFdPropertiesKHR : public layout::MemoryFdPropertiesKHR
- {
- MemoryFdPropertiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::MemoryFdPropertiesKHR()
+ MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memoryTypeBits( memoryTypeBits_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) - offsetof( MemoryFdPropertiesKHR, pNext ) );
+ return *this;
+ }
+
MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryFdPropertiesKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryFdPropertiesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>(&rhs);
return *this;
}
@@ -42851,55 +39694,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryFdPropertiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
+ void* pNext = {};
+ uint32_t memoryTypeBits = {};
};
static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- namespace layout
- {
- struct MemoryGetAndroidHardwareBufferInfoANDROID
- {
- protected:
- VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( vk::DeviceMemory memory_ = vk::DeviceMemory() ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- {}
-
- MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>(this) = rhs;
- }
-
- MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
- const void* pNext = nullptr;
- vk::DeviceMemory memory;
- };
- static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryGetAndroidHardwareBufferInfoANDROID : public layout::MemoryGetAndroidHardwareBufferInfoANDROID
+ struct MemoryGetAndroidHardwareBufferInfoANDROID
{
- VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( vk::DeviceMemory memory_ = vk::DeviceMemory() ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryGetAndroidHardwareBufferInfoANDROID( memory_ )
+ VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) - offsetof( MemoryGetAndroidHardwareBufferInfoANDROID, pNext ) );
+ return *this;
+ }
+
MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryGetAndroidHardwareBufferInfoANDROID( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryGetAndroidHardwareBufferInfoANDROID::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>(&rhs);
return *this;
}
@@ -42909,7 +39733,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
@@ -42937,58 +39761,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryGetAndroidHardwareBufferInfoANDROID::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
};
static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
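
An export sketch for the Android path, assuming device, memory, and a dispatch loader d with VK_ANDROID_external_memory_android_hardware_buffer enabled (the dispatch setup itself is outside this diff):

    vk::MemoryGetAndroidHardwareBufferInfoANDROID getInfo( memory );
    AHardwareBuffer* hardwareBuffer =
        device.getMemoryAndroidHardwareBufferANDROID( getInfo, d );
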
- namespace layout
+ struct MemoryGetFdInfoKHR
{
- struct MemoryGetFdInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- , handleType( handleType_ )
- {}
-
- MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryGetFdInfoKHR*>(this) = rhs;
- }
-
- MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryGetFdInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
- const void* pNext = nullptr;
- vk::DeviceMemory memory;
- vk::ExternalMemoryHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryGetFdInfoKHR : public layout::MemoryGetFdInfoKHR
- {
- VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryGetFdInfoKHR( memory_, handleType_ )
+ VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ )
+ , handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) - offsetof( MemoryGetFdInfoKHR, pNext ) );
+ return *this;
+ }
+
MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryGetFdInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryGetFdInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>(&rhs);
return *this;
}
@@ -42998,13 +39801,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MemoryGetFdInfoKHR & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- MemoryGetFdInfoKHR & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -43025,7 +39828,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -43033,59 +39836,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryGetFdInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
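
Note that the handleType comparison also changes from the qualified vk::operator== to the plain built-in enum comparison, presumably because the flag-bit enums no longer need the helper. An export sketch, assuming device, memory, and a dispatch loader d with VK_KHR_external_memory_fd enabled:

    vk::MemoryGetFdInfoKHR getInfo( memory,
                                    vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
    int fd = device.getMemoryFdKHR( getInfo, d );
    // The caller owns fd from here on: close it, or import it back into
    // another Vulkan allocation.
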
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct MemoryGetWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : memory( memory_ )
- , handleType( handleType_ )
- {}
-
- MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>(this) = rhs;
- }
-
- MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
- const void* pNext = nullptr;
- vk::DeviceMemory memory;
- vk::ExternalMemoryHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryGetWin32HandleInfoKHR : public layout::MemoryGetWin32HandleInfoKHR
+ struct MemoryGetWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( vk::DeviceMemory memory_ = vk::DeviceMemory(),
- vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryGetWin32HandleInfoKHR( memory_, handleType_ )
+ VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : memory( memory_ )
+ , handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) - offsetof( MemoryGetWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryGetWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryGetWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -43095,13 +39878,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MemoryGetWin32HandleInfoKHR & setMemory( vk::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
- MemoryGetWin32HandleInfoKHR & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -43122,7 +39905,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( memory == rhs.memory )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -43130,8 +39913,11 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryGetWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
@@ -43139,17 +39925,20 @@ namespace VULKAN_HPP_NAMESPACE
struct MemoryHeap
{
- MemoryHeap() VULKAN_HPP_NOEXCEPT
+ MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : size( size_ )
+ , flags( flags_ )
{}
MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkMemoryHeap*>(this) = rhs;
+ *this = rhs;
}
MemoryHeap& operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkMemoryHeap*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>(&rhs);
return *this;
}
@@ -43175,52 +39964,32 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DeviceSize size;
- vk::MemoryHeapFlags flags;
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
};
static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct MemoryHostPointerPropertiesEXT
{
- struct MemoryHostPointerPropertiesEXT
- {
- protected:
- MemoryHostPointerPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>(this) = rhs;
- }
-
- MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
- void* pNext = nullptr;
- uint32_t memoryTypeBits;
- };
- static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryHostPointerPropertiesEXT : public layout::MemoryHostPointerPropertiesEXT
- {
- MemoryHostPointerPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::MemoryHostPointerPropertiesEXT()
+ MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memoryTypeBits( memoryTypeBits_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) - offsetof( MemoryHostPointerPropertiesEXT, pNext ) );
+ return *this;
+ }
+
MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryHostPointerPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryHostPointerPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>(&rhs);
return *this;
}
@@ -43246,53 +40015,99 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryHostPointerPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
+ void* pNext = {};
+ uint32_t memoryTypeBits = {};
};
static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct MemoryOpaqueCaptureAddressAllocateInfo
{
- struct MemoryPriorityAllocateInfoEXT
+ VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
+ : opaqueCaptureAddress( opaqueCaptureAddress_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo & operator=( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = 0 ) VULKAN_HPP_NOEXCEPT
- : priority( priority_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) - offsetof( MemoryOpaqueCaptureAddressAllocateInfo, pNext ) );
+ return *this;
+ }
- MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>(this) = rhs;
- }
+ MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>(this) = rhs;
- return *this;
- }
+ MemoryOpaqueCaptureAddressAllocateInfo& operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>(&rhs);
+ return *this;
+ }
- public:
- vk::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
- const void* pNext = nullptr;
- float priority;
- };
- static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "layout struct and wrapper have different size!" );
- }
+ MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ opaqueCaptureAddress = opaqueCaptureAddress_;
+ return *this;
+ }
+
+ operator VkMemoryOpaqueCaptureAddressAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
+ }
+
+ operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
+ }
+
+ bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
+ }
+
+ bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
+ const void* pNext = {};
+ uint64_t opaqueCaptureAddress = {};
+ };
+ static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
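
Unlike its neighbours, MemoryOpaqueCaptureAddressAllocateInfo has no removed counterpart: it is introduced by this diff (it appears to come with the Vulkan 1.2 promotion of VK_KHR_buffer_device_address, for capture/replay of opaque device addresses). A replay sketch, assuming device, size, typeIndex, and an address recorded in an earlier capture run:

    vk::MemoryOpaqueCaptureAddressAllocateInfo captureInfo( capturedAddress );
    vk::MemoryAllocateInfo allocInfo( size, typeIndex );
    allocInfo.pNext = &captureInfo; // members are public now, so chaining is direct
    vk::DeviceMemory memory = device.allocateMemory( allocInfo );
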
- struct MemoryPriorityAllocateInfoEXT : public layout::MemoryPriorityAllocateInfoEXT
+ struct MemoryPriorityAllocateInfoEXT
{
- VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryPriorityAllocateInfoEXT( priority_ )
+ VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT
+ : priority( priority_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) - offsetof( MemoryPriorityAllocateInfoEXT, pNext ) );
+ return *this;
+ }
+
MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryPriorityAllocateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryPriorityAllocateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>(&rhs);
return *this;
}
@@ -43330,25 +40145,32 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryPriorityAllocateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
+ const void* pNext = {};
+ float priority = {};
};
static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
struct MemoryRequirements
{
- MemoryRequirements() VULKAN_HPP_NOEXCEPT
+ MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {},
+ uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+ : size( size_ )
+ , alignment( alignment_ )
+ , memoryTypeBits( memoryTypeBits_ )
{}
MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkMemoryRequirements*>(this) = rhs;
+ *this = rhs;
}
MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkMemoryRequirements*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>(&rhs);
return *this;
}
@@ -43375,53 +40197,33 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::DeviceSize size;
- vk::DeviceSize alignment;
- uint32_t memoryTypeBits;
+ VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
+ uint32_t memoryTypeBits = {};
};
static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct MemoryRequirements2
- {
- protected:
- MemoryRequirements2() VULKAN_HPP_NOEXCEPT
- {}
-
- MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryRequirements2*>(this) = rhs;
- }
-
- MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryRequirements2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryRequirements2;
- void* pNext = nullptr;
- vk::MemoryRequirements memoryRequirements;
- };
- static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryRequirements2 : public layout::MemoryRequirements2
+ struct MemoryRequirements2
{
- MemoryRequirements2() VULKAN_HPP_NOEXCEPT
- : layout::MemoryRequirements2()
+ MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memoryRequirements( memoryRequirements_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 & operator=( VULKAN_HPP_NAMESPACE::MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) - offsetof( MemoryRequirements2, pNext ) );
+ return *this;
+ }
+
MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryRequirements2( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryRequirements2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>(&rhs);
return *this;
}
@@ -43447,25 +40249,30 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryRequirements2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
};
static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
struct MemoryType
{
- MemoryType() VULKAN_HPP_NOEXCEPT
+ MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {},
+ uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : propertyFlags( propertyFlags_ )
+ , heapIndex( heapIndex_ )
{}
MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkMemoryType*>(this) = rhs;
+ *this = rhs;
}
MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkMemoryType*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>(&rhs);
return *this;
}
@@ -43491,54 +40298,34 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::MemoryPropertyFlags propertyFlags;
- uint32_t heapIndex;
+ VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
+ uint32_t heapIndex = {};
};
static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );
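
MemoryHeap, MemoryRequirements and MemoryType carry no sType/pNext header, so they get plain memberwise constructors and comparisons. They mostly show up in the classic memory-type search; a self-contained sketch (helper name hypothetical):

    #include <cstdint>
    #include <stdexcept>
    #include <vulkan/vulkan.hpp>

    // Pick the first memory type allowed by typeBits that has all of the
    // requested property flags set.
    uint32_t findMemoryType( vk::PhysicalDevice physicalDevice,
                             uint32_t typeBits,
                             vk::MemoryPropertyFlags props )
    {
      vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
      for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
      {
        if ( ( typeBits & ( 1u << i ) ) &&
             ( memProps.memoryTypes[i].propertyFlags & props ) == props )
          return i;
      }
      throw std::runtime_error( "no suitable memory type" );
    }
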
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct MemoryWin32HandlePropertiesKHR
- {
- protected:
- MemoryWin32HandlePropertiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>(this) = rhs;
- }
-
- MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
- void* pNext = nullptr;
- uint32_t memoryTypeBits;
- };
- static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct MemoryWin32HandlePropertiesKHR : public layout::MemoryWin32HandlePropertiesKHR
+ struct MemoryWin32HandlePropertiesKHR
{
- MemoryWin32HandlePropertiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::MemoryWin32HandlePropertiesKHR()
+ MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memoryTypeBits( memoryTypeBits_ )
{}
+ VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) );
+ return *this;
+ }
+
MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MemoryWin32HandlePropertiesKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MemoryWin32HandlePropertiesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>(&rhs);
return *this;
}
@@ -43564,8 +40351,10 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MemoryWin32HandlePropertiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
+ void* pNext = {};
+ uint32_t memoryTypeBits = {};
};
static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
@@ -43573,51 +40362,28 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_METAL_EXT
- namespace layout
+ struct MetalSurfaceCreateInfoEXT
{
- struct MetalSurfaceCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( vk::MetalSurfaceCreateFlagsEXT flags_ = vk::MetalSurfaceCreateFlagsEXT(),
- const CAMetalLayer* pLayer_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pLayer( pLayer_ )
- {}
-
- MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>(this) = rhs;
- }
-
- MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
- const void* pNext = nullptr;
- vk::MetalSurfaceCreateFlagsEXT flags;
- const CAMetalLayer* pLayer;
- };
- static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct MetalSurfaceCreateInfoEXT : public layout::MetalSurfaceCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( vk::MetalSurfaceCreateFlagsEXT flags_ = vk::MetalSurfaceCreateFlagsEXT(),
- const CAMetalLayer* pLayer_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::MetalSurfaceCreateInfoEXT( flags_, pLayer_ )
+ VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {},
+ const CAMetalLayer* pLayer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , pLayer( pLayer_ )
{}
+ VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) - offsetof( MetalSurfaceCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MetalSurfaceCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MetalSurfaceCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -43627,7 +40393,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- MetalSurfaceCreateInfoEXT & setFlags( vk::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -43662,53 +40428,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MetalSurfaceCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
+ const CAMetalLayer* pLayer = {};
};
static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_METAL_EXT*/
- namespace layout
+ struct MultisamplePropertiesEXT
{
- struct MultisamplePropertiesEXT
- {
- protected:
- MultisamplePropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMultisamplePropertiesEXT*>(this) = rhs;
- }
-
- MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkMultisamplePropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eMultisamplePropertiesEXT;
- void* pNext = nullptr;
- vk::Extent2D maxSampleLocationGridSize;
- };
- static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct MultisamplePropertiesEXT : public layout::MultisamplePropertiesEXT
- {
- MultisamplePropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::MultisamplePropertiesEXT()
+ MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
{}
+ VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) - offsetof( MultisamplePropertiesEXT, pNext ) );
+ return *this;
+ }
+
MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::MultisamplePropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::MultisamplePropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>(&rhs);
return *this;
}
@@ -43734,85 +40483,50 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::MultisamplePropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
};
static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
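
A query sketch for the flattened MultisamplePropertiesEXT, assuming physicalDevice and a dispatch loader d with VK_EXT_sample_locations enabled:

    vk::MultisamplePropertiesEXT props =
        physicalDevice.getMultisamplePropertiesEXT( vk::SampleCountFlagBits::e4, d );
    vk::Extent2D grid = props.maxSampleLocationGridSize;
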
- namespace layout
+ struct ObjectTableCreateInfoNVX
{
- struct ObjectTableCreateInfoNVX
- {
- protected:
- VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0,
- const vk::ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr,
- const uint32_t* pObjectEntryCounts_ = nullptr,
- const vk::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr,
- uint32_t maxUniformBuffersPerDescriptor_ = 0,
- uint32_t maxStorageBuffersPerDescriptor_ = 0,
- uint32_t maxStorageImagesPerDescriptor_ = 0,
- uint32_t maxSampledImagesPerDescriptor_ = 0,
- uint32_t maxPipelineLayouts_ = 0 ) VULKAN_HPP_NOEXCEPT
- : objectCount( objectCount_ )
- , pObjectEntryTypes( pObjectEntryTypes_ )
- , pObjectEntryCounts( pObjectEntryCounts_ )
- , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
- , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
- , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
- , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
- , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
- , maxPipelineLayouts( maxPipelineLayouts_ )
- {}
-
- ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkObjectTableCreateInfoNVX*>(this) = rhs;
- }
-
- ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkObjectTableCreateInfoNVX*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eObjectTableCreateInfoNVX;
- const void* pNext = nullptr;
- uint32_t objectCount;
- const vk::ObjectEntryTypeNVX* pObjectEntryTypes;
- const uint32_t* pObjectEntryCounts;
- const vk::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
- uint32_t maxUniformBuffersPerDescriptor;
- uint32_t maxStorageBuffersPerDescriptor;
- uint32_t maxStorageImagesPerDescriptor;
- uint32_t maxSampledImagesPerDescriptor;
- uint32_t maxPipelineLayouts;
- };
- static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "layout struct and wrapper have different size!" );
- }
-
- struct ObjectTableCreateInfoNVX : public layout::ObjectTableCreateInfoNVX
- {
- VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0,
- const vk::ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr,
- const uint32_t* pObjectEntryCounts_ = nullptr,
- const vk::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr,
- uint32_t maxUniformBuffersPerDescriptor_ = 0,
- uint32_t maxStorageBuffersPerDescriptor_ = 0,
- uint32_t maxStorageImagesPerDescriptor_ = 0,
- uint32_t maxSampledImagesPerDescriptor_ = 0,
- uint32_t maxPipelineLayouts_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ObjectTableCreateInfoNVX( objectCount_, pObjectEntryTypes_, pObjectEntryCounts_, pObjectEntryUsageFlags_, maxUniformBuffersPerDescriptor_, maxStorageBuffersPerDescriptor_, maxStorageImagesPerDescriptor_, maxSampledImagesPerDescriptor_, maxPipelineLayouts_ )
+ VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ = {},
+ const uint32_t* pObjectEntryCounts_ = {},
+ const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = {},
+ uint32_t maxUniformBuffersPerDescriptor_ = {},
+ uint32_t maxStorageBuffersPerDescriptor_ = {},
+ uint32_t maxStorageImagesPerDescriptor_ = {},
+ uint32_t maxSampledImagesPerDescriptor_ = {},
+ uint32_t maxPipelineLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
+ : objectCount( objectCount_ )
+ , pObjectEntryTypes( pObjectEntryTypes_ )
+ , pObjectEntryCounts( pObjectEntryCounts_ )
+ , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
+ , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
+ , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
+ , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
+ , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
+ , maxPipelineLayouts( maxPipelineLayouts_ )
{}
+ VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX & operator=( VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX ) - offsetof( ObjectTableCreateInfoNVX, pNext ) );
+ return *this;
+ }
+
ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ObjectTableCreateInfoNVX( rhs )
- {}
+ {
+ *this = rhs;
+ }
ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ObjectTableCreateInfoNVX::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX const *>(&rhs);
return *this;
}
@@ -43828,7 +40542,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ObjectTableCreateInfoNVX & setPObjectEntryTypes( const vk::ObjectEntryTypeNVX* pObjectEntryTypes_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableCreateInfoNVX & setPObjectEntryTypes( const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ ) VULKAN_HPP_NOEXCEPT
{
pObjectEntryTypes = pObjectEntryTypes_;
return *this;
@@ -43840,7 +40554,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const vk::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) VULKAN_HPP_NOEXCEPT
{
pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
return *this;
@@ -43906,38 +40620,48 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ObjectTableCreateInfoNVX::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eObjectTableCreateInfoNVX;
+ const void* pNext = {};
+ uint32_t objectCount = {};
+ const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes = {};
+ const uint32_t* pObjectEntryCounts = {};
+ const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags = {};
+ uint32_t maxUniformBuffersPerDescriptor = {};
+ uint32_t maxStorageBuffersPerDescriptor = {};
+ uint32_t maxStorageImagesPerDescriptor = {};
+ uint32_t maxSampledImagesPerDescriptor = {};
+ uint32_t maxPipelineLayouts = {};
};
static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ObjectTableCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
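
The assignment operators introduced by this patch copy everything from pNext onward with a single memcpy, deliberately skipping the const sType tag that each wrapper fixes at construction. A minimal self-contained sketch of the idiom, with illustrative Wrapper/VkWrapper names that are not vulkan.hpp types:

    #include <cstddef>   // offsetof
    #include <cstring>   // std::memcpy

    // Stand-in for the C struct the wrapper mirrors (illustrative only).
    struct VkWrapper { int sType; const void* pNext; unsigned value; };

    struct Wrapper
    {
      Wrapper & operator=( Wrapper const & rhs ) noexcept
      {
        // sType is const and identical in every instance, so the copy
        // starts at pNext and covers the rest of the object.
        std::memcpy( &pNext, &rhs.pNext, sizeof( Wrapper ) - offsetof( Wrapper, pNext ) );
        return *this;
      }

      const int   sType = 7;        // fixed structure tag, never overwritten
      const void* pNext = nullptr;
      unsigned    value = 0;
    };

    static_assert( sizeof( Wrapper ) == sizeof( VkWrapper ), "struct and wrapper have different size!" );

Starting the copy at pNext is what lets sType stay const while the wrapper remains standard-layout and byte-compatible with the C struct, which is exactly what the static_asserts above verify.
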
struct ObjectTableEntryNVX
{
- VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
- vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, flags( flags_ )
{}
ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTableEntryNVX*>(this) = rhs;
+ *this = rhs;
}
ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTableEntryNVX*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX const *>(&rhs);
return *this;
}
- ObjectTableEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- ObjectTableEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -43965,18 +40689,18 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ObjectEntryTypeNVX type;
- vk::ObjectEntryUsageFlagsNVX flags;
+ VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
};
static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ObjectTableEntryNVX>::value, "struct wrapper is not a standard layout!" );
struct ObjectTableDescriptorSetEntryNVX
{
- VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
- vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(),
- vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(),
- vk::DescriptorSet descriptorSet_ = vk::DescriptorSet() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, flags( flags_ )
, pipelineLayout( pipelineLayout_ )
@@ -43984,8 +40708,8 @@ namespace VULKAN_HPP_NAMESPACE
{}
explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(),
- vk::DescriptorSet descriptorSet_ = vk::DescriptorSet() )
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} )
: type( objectTableEntryNVX.type )
, flags( objectTableEntryNVX.flags )
, pipelineLayout( pipelineLayout_ )
@@ -43994,34 +40718,34 @@ namespace VULKAN_HPP_NAMESPACE
ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTableDescriptorSetEntryNVX*>(this) = rhs;
+ *this = rhs;
}
ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTableDescriptorSetEntryNVX*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableDescriptorSetEntryNVX const *>(&rhs);
return *this;
}
- ObjectTableDescriptorSetEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableDescriptorSetEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- ObjectTableDescriptorSetEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableDescriptorSetEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ObjectTableDescriptorSetEntryNVX & setPipelineLayout( vk::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableDescriptorSetEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
pipelineLayout = pipelineLayout_;
return *this;
}
- ObjectTableDescriptorSetEntryNVX & setDescriptorSet( vk::DescriptorSet descriptorSet_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableDescriptorSetEntryNVX & setDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ ) VULKAN_HPP_NOEXCEPT
{
descriptorSet = descriptorSet_;
return *this;
@@ -44051,20 +40775,20 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ObjectEntryTypeNVX type;
- vk::ObjectEntryUsageFlagsNVX flags;
- vk::PipelineLayout pipelineLayout;
- vk::DescriptorSet descriptorSet;
+ VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet = {};
};
static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ObjectTableDescriptorSetEntryNVX>::value, "struct wrapper is not a standard layout!" );
struct ObjectTableIndexBufferEntryNVX
{
- VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
- vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(),
- vk::Buffer buffer_ = vk::Buffer(),
- vk::IndexType indexType_ = vk::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, flags( flags_ )
, buffer( buffer_ )
@@ -44072,8 +40796,8 @@ namespace VULKAN_HPP_NAMESPACE
{}
explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- vk::Buffer buffer_ = vk::Buffer(),
- vk::IndexType indexType_ = vk::IndexType::eUint16 )
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+ VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 )
: type( objectTableEntryNVX.type )
, flags( objectTableEntryNVX.flags )
, buffer( buffer_ )
@@ -44082,34 +40806,34 @@ namespace VULKAN_HPP_NAMESPACE
ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTableIndexBufferEntryNVX*>(this) = rhs;
+ *this = rhs;
}
ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTableIndexBufferEntryNVX*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableIndexBufferEntryNVX const *>(&rhs);
return *this;
}
- ObjectTableIndexBufferEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableIndexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- ObjectTableIndexBufferEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableIndexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ObjectTableIndexBufferEntryNVX & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableIndexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
- ObjectTableIndexBufferEntryNVX & setIndexType( vk::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableIndexBufferEntryNVX & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
@@ -44139,26 +40863,26 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ObjectEntryTypeNVX type;
- vk::ObjectEntryUsageFlagsNVX flags;
- vk::Buffer buffer;
- vk::IndexType indexType;
+ VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+ VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
};
static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ObjectTableIndexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );
struct ObjectTablePipelineEntryNVX
{
- VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
- vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(),
- vk::Pipeline pipeline_ = vk::Pipeline() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, flags( flags_ )
, pipeline( pipeline_ )
{}
explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- vk::Pipeline pipeline_ = vk::Pipeline() )
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} )
: type( objectTableEntryNVX.type )
, flags( objectTableEntryNVX.flags )
, pipeline( pipeline_ )
@@ -44166,28 +40890,28 @@ namespace VULKAN_HPP_NAMESPACE
ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTablePipelineEntryNVX*>(this) = rhs;
+ *this = rhs;
}
ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTablePipelineEntryNVX*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTablePipelineEntryNVX const *>(&rhs);
return *this;
}
- ObjectTablePipelineEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTablePipelineEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- ObjectTablePipelineEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTablePipelineEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ObjectTablePipelineEntryNVX & setPipeline( vk::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTablePipelineEntryNVX & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
@@ -44216,19 +40940,19 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ObjectEntryTypeNVX type;
- vk::ObjectEntryUsageFlagsNVX flags;
- vk::Pipeline pipeline;
+ VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
};
static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ObjectTablePipelineEntryNVX>::value, "struct wrapper is not a standard layout!" );
struct ObjectTablePushConstantEntryNVX
{
- VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
- vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(),
- vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(),
- vk::ShaderStageFlags stageFlags_ = vk::ShaderStageFlags() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, flags( flags_ )
, pipelineLayout( pipelineLayout_ )
@@ -44236,8 +40960,8 @@ namespace VULKAN_HPP_NAMESPACE
{}
explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- vk::PipelineLayout pipelineLayout_ = vk::PipelineLayout(),
- vk::ShaderStageFlags stageFlags_ = vk::ShaderStageFlags() )
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} )
: type( objectTableEntryNVX.type )
, flags( objectTableEntryNVX.flags )
, pipelineLayout( pipelineLayout_ )
@@ -44246,34 +40970,34 @@ namespace VULKAN_HPP_NAMESPACE
ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>(this) = rhs;
+ *this = rhs;
}
ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTablePushConstantEntryNVX*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTablePushConstantEntryNVX const *>(&rhs);
return *this;
}
- ObjectTablePushConstantEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTablePushConstantEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- ObjectTablePushConstantEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTablePushConstantEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ObjectTablePushConstantEntryNVX & setPipelineLayout( vk::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTablePushConstantEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
{
pipelineLayout = pipelineLayout_;
return *this;
}
- ObjectTablePushConstantEntryNVX & setStageFlags( vk::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTablePushConstantEntryNVX & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
@@ -44303,26 +41027,26 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ObjectEntryTypeNVX type;
- vk::ObjectEntryUsageFlagsNVX flags;
- vk::PipelineLayout pipelineLayout;
- vk::ShaderStageFlags stageFlags;
+ VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
};
static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ObjectTablePushConstantEntryNVX>::value, "struct wrapper is not a standard layout!" );
struct ObjectTableVertexBufferEntryNVX
{
- VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( vk::ObjectEntryTypeNVX type_ = vk::ObjectEntryTypeNVX::eDescriptorSet,
- vk::ObjectEntryUsageFlagsNVX flags_ = vk::ObjectEntryUsageFlagsNVX(),
- vk::Buffer buffer_ = vk::Buffer() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet,
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {},
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, flags( flags_ )
, buffer( buffer_ )
{}
explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX,
- vk::Buffer buffer_ = vk::Buffer() )
+ VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} )
: type( objectTableEntryNVX.type )
, flags( objectTableEntryNVX.flags )
, buffer( buffer_ )
@@ -44330,28 +41054,28 @@ namespace VULKAN_HPP_NAMESPACE
ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>(this) = rhs;
+ *this = rhs;
}
ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkObjectTableVertexBufferEntryNVX*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ObjectTableVertexBufferEntryNVX const *>(&rhs);
return *this;
}
- ObjectTableVertexBufferEntryNVX & setType( vk::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableVertexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- ObjectTableVertexBufferEntryNVX & setFlags( vk::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableVertexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- ObjectTableVertexBufferEntryNVX & setBuffer( vk::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+ ObjectTableVertexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
@@ -44380,26 +41104,35 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ObjectEntryTypeNVX type;
- vk::ObjectEntryUsageFlagsNVX flags;
- vk::Buffer buffer;
+ VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet;
+ VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {};
+ VULKAN_HPP_NAMESPACE::Buffer buffer = {};
};
static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ObjectTableVertexBufferEntryNVX>::value, "struct wrapper is not a standard layout!" );
struct PastPresentationTimingGOOGLE
{
- PastPresentationTimingGOOGLE() VULKAN_HPP_NOEXCEPT
+ PastPresentationTimingGOOGLE( uint32_t presentID_ = {},
+ uint64_t desiredPresentTime_ = {},
+ uint64_t actualPresentTime_ = {},
+ uint64_t earliestPresentTime_ = {},
+ uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT
+ : presentID( presentID_ )
+ , desiredPresentTime( desiredPresentTime_ )
+ , actualPresentTime( actualPresentTime_ )
+ , earliestPresentTime( earliestPresentTime_ )
+ , presentMargin( presentMargin_ )
{}
PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPastPresentationTimingGOOGLE*>(this) = rhs;
+ *this = rhs;
}
PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPastPresentationTimingGOOGLE*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>(&rhs);
return *this;
}
@@ -44428,56 +41161,35 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t presentID;
- uint64_t desiredPresentTime;
- uint64_t actualPresentTime;
- uint64_t earliestPresentTime;
- uint64_t presentMargin;
+ uint32_t presentID = {};
+ uint64_t desiredPresentTime = {};
+ uint64_t actualPresentTime = {};
+ uint64_t earliestPresentTime = {};
+ uint64_t presentMargin = {};
};
static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PerformanceConfigurationAcquireInfoINTEL
- {
- protected:
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( vk::PerformanceConfigurationTypeINTEL type_ = vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- {}
-
- PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>(this) = rhs;
- }
-
- PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
- const void* pNext = nullptr;
- vk::PerformanceConfigurationTypeINTEL type;
- };
- static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "layout struct and wrapper have different size!" );
- }
-
- struct PerformanceConfigurationAcquireInfoINTEL : public layout::PerformanceConfigurationAcquireInfoINTEL
+ struct PerformanceConfigurationAcquireInfoINTEL
{
- VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( vk::PerformanceConfigurationTypeINTEL type_ = vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT
- : layout::PerformanceConfigurationAcquireInfoINTEL( type_ )
+ VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT
+ : type( type_ )
{}
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) - offsetof( PerformanceConfigurationAcquireInfoINTEL, pNext ) );
+ return *this;
+ }
+
PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PerformanceConfigurationAcquireInfoINTEL( rhs )
- {}
+ {
+ *this = rhs;
+ }
PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PerformanceConfigurationAcquireInfoINTEL::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>(&rhs);
return *this;
}
@@ -44487,7 +41199,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PerformanceConfigurationAcquireInfoINTEL & setType( vk::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
@@ -44515,53 +41227,262 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PerformanceConfigurationAcquireInfoINTEL::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
};
static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PerformanceCounterDescriptionKHR
{
- struct PerformanceMarkerInfoINTEL
+ PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& category_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , name{}
+ , category{}
+ , description{}
{
- protected:
- VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = 0 ) VULKAN_HPP_NOEXCEPT
- : marker( marker_ )
- {}
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( category, category_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+ }
- PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>(this) = rhs;
- }
+ VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) - offsetof( PerformanceCounterDescriptionKHR, pNext ) );
+ return *this;
+ }
- PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>(this) = rhs;
- return *this;
- }
+ PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- public:
- vk::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
- const void* pNext = nullptr;
- uint64_t marker;
- };
- static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "layout struct and wrapper have different size!" );
- }
+ PerformanceCounterDescriptionKHR& operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>(&rhs);
+ return *this;
+ }
+
+ operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
+ }
+
+ operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
+ }
+
+ bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( flags == rhs.flags )
+ && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+ && ( memcmp( category, rhs.category, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+ && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
+ }
+
+ bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
+ char name[VK_MAX_DESCRIPTION_SIZE] = {};
+ char category[VK_MAX_DESCRIPTION_SIZE] = {};
+ char description[VK_MAX_DESCRIPTION_SIZE] = {};
+ };
+ static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
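
The three text members added above are fixed-size, null-terminated char arrays rather than pointers, so a caller can treat them as C strings directly. A small usage sketch, assuming vulkan.hpp is included and using a hypothetical printDescription helper:

    #include <vulkan/vulkan.hpp>
    #include <cstdio>

    // Hedged sketch: name/category/description are zero-terminated
    // char[VK_MAX_DESCRIPTION_SIZE] arrays, printable as-is.
    void printDescription( vk::PerformanceCounterDescriptionKHR const & d )
    {
      std::printf( "%s (%s): %s\n", d.name, d.category, d.description );
    }
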
+
+ struct PerformanceCounterKHR
+ {
+ PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer,
+ VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
+ std::array<uint8_t,VK_UUID_SIZE> const& uuid_ = {} ) VULKAN_HPP_NOEXCEPT
+ : unit( unit_ )
+ , scope( scope_ )
+ , storage( storage_ )
+ , uuid{}
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( uuid, uuid_ );
+ }
- struct PerformanceMarkerInfoINTEL : public layout::PerformanceMarkerInfoINTEL
+ VULKAN_HPP_NAMESPACE::PerformanceCounterKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) - offsetof( PerformanceCounterKHR, pNext ) );
+ return *this;
+ }
+
+ PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ PerformanceCounterKHR& operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>(&rhs);
+ return *this;
+ }
+
+ operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
+ }
+
+ operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
+ }
+
+ bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( unit == rhs.unit )
+ && ( scope == rhs.scope )
+ && ( storage == rhs.storage )
+ && ( memcmp( uuid, rhs.uuid, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 );
+ }
+
+ bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
+ VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
+ VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
+ uint8_t uuid[VK_UUID_SIZE] = {};
+ };
+ static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+
+ union PerformanceCounterResultKHR
{
- VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PerformanceMarkerInfoINTEL( marker_ )
+ PerformanceCounterResultKHR( int32_t int32_ = {} )
+ {
+ int32 = int32_;
+ }
+
+ PerformanceCounterResultKHR( int64_t int64_ )
+ {
+ int64 = int64_;
+ }
+
+ PerformanceCounterResultKHR( uint32_t uint32_ )
+ {
+ uint32 = uint32_;
+ }
+
+ PerformanceCounterResultKHR( uint64_t uint64_ )
+ {
+ uint64 = uint64_;
+ }
+
+ PerformanceCounterResultKHR( float float32_ )
+ {
+ float32 = float32_;
+ }
+
+ PerformanceCounterResultKHR( double float64_ )
+ {
+ float64 = float64_;
+ }
+
+ PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ int32 = int32_;
+ return *this;
+ }
+
+ PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ int64 = int64_;
+ return *this;
+ }
+
+ PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ uint32 = uint32_;
+ return *this;
+ }
+
+ PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ uint64 = uint64_;
+ return *this;
+ }
+
+ PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ float32 = float32_;
+ return *this;
+ }
+
+ PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ float64 = float64_;
+ return *this;
+ }
+
+ VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
+ return *this;
+ }
+
+ operator VkPerformanceCounterResultKHR const&() const
+ {
+ return *reinterpret_cast<const VkPerformanceCounterResultKHR*>(this);
+ }
+
+ operator VkPerformanceCounterResultKHR &()
+ {
+ return *reinterpret_cast<VkPerformanceCounterResultKHR*>(this);
+ }
+
+ int32_t int32;
+ int64_t int64;
+ uint32_t uint32;
+ uint64_t uint64;
+ float float32;
+ double float64;
+ };
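
The new PerformanceCounterResultKHR union is a tagged value: the active member is named by the storage field of the matching PerformanceCounterKHR. A hedged sketch of reading one back, with a hypothetical asDouble helper:

    #include <vulkan/vulkan.hpp>

    // Hedged sketch: convert a counter result to double, using the storage
    // tag reported by the matching PerformanceCounterKHR to pick the member.
    double asDouble( vk::PerformanceCounterResultKHR const & result,
                     vk::PerformanceCounterStorageKHR storage )
    {
      switch ( storage )
      {
        case vk::PerformanceCounterStorageKHR::eInt32   : return static_cast<double>( result.int32 );
        case vk::PerformanceCounterStorageKHR::eInt64   : return static_cast<double>( result.int64 );
        case vk::PerformanceCounterStorageKHR::eUint32  : return static_cast<double>( result.uint32 );
        case vk::PerformanceCounterStorageKHR::eUint64  : return static_cast<double>( result.uint64 );
        case vk::PerformanceCounterStorageKHR::eFloat32 : return static_cast<double>( result.float32 );
        default /* eFloat64 */                          : return result.float64;
      }
    }
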
+
+ struct PerformanceMarkerInfoINTEL
+ {
+ VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT
+ : marker( marker_ )
{}
+ VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) - offsetof( PerformanceMarkerInfoINTEL, pNext ) );
+ return *this;
+ }
+
PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PerformanceMarkerInfoINTEL( rhs )
- {}
+ {
+ *this = rhs;
+ }
PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PerformanceMarkerInfoINTEL::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>(&rhs);
return *this;
}
@@ -44599,61 +41520,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PerformanceMarkerInfoINTEL::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+ const void* pNext = {};
+ uint64_t marker = {};
};
static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PerformanceOverrideInfoINTEL
{
- struct PerformanceOverrideInfoINTEL
- {
- protected:
- VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( vk::PerformanceOverrideTypeINTEL type_ = vk::PerformanceOverrideTypeINTEL::eNullHardware,
- vk::Bool32 enable_ = 0,
- uint64_t parameter_ = 0 ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , enable( enable_ )
- , parameter( parameter_ )
- {}
-
- PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>(this) = rhs;
- }
-
- PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
- const void* pNext = nullptr;
- vk::PerformanceOverrideTypeINTEL type;
- vk::Bool32 enable;
- uint64_t parameter;
- };
- static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "layout struct and wrapper have different size!" );
- }
-
- struct PerformanceOverrideInfoINTEL : public layout::PerformanceOverrideInfoINTEL
- {
- VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( vk::PerformanceOverrideTypeINTEL type_ = vk::PerformanceOverrideTypeINTEL::eNullHardware,
- vk::Bool32 enable_ = 0,
- uint64_t parameter_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PerformanceOverrideInfoINTEL( type_, enable_, parameter_ )
+ VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware,
+ VULKAN_HPP_NAMESPACE::Bool32 enable_ = {},
+ uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT
+ : type( type_ )
+ , enable( enable_ )
+ , parameter( parameter_ )
{}
+ VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) - offsetof( PerformanceOverrideInfoINTEL, pNext ) );
+ return *this;
+ }
+
PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PerformanceOverrideInfoINTEL( rhs )
- {}
+ {
+ *this = rhs;
+ }
PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PerformanceOverrideInfoINTEL::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>(&rhs);
return *this;
}
@@ -44663,13 +41561,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PerformanceOverrideInfoINTEL & setType( vk::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- PerformanceOverrideInfoINTEL & setEnable( vk::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
{
enable = enable_;
return *this;
@@ -44705,53 +41603,101 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PerformanceOverrideInfoINTEL::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+ VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+ uint64_t parameter = {};
};
static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PerformanceQuerySubmitInfoKHR
{
- struct PerformanceStreamMarkerInfoINTEL
+ VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : counterPassIndex( counterPassIndex_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = 0 ) VULKAN_HPP_NOEXCEPT
- : marker( marker_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) - offsetof( PerformanceQuerySubmitInfoKHR, pNext ) );
+ return *this;
+ }
- PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>(this) = rhs;
- }
+ PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>(this) = rhs;
- return *this;
- }
+ PerformanceQuerySubmitInfoKHR& operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>(&rhs);
+ return *this;
+ }
- public:
- vk::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
- const void* pNext = nullptr;
- uint32_t marker;
- };
- static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "layout struct and wrapper have different size!" );
- }
+ PerformanceQuerySubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
- struct PerformanceStreamMarkerInfoINTEL : public layout::PerformanceStreamMarkerInfoINTEL
+ PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ counterPassIndex = counterPassIndex_;
+ return *this;
+ }
+
+ operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
+ }
+
+ operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
+ }
+
+ bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( counterPassIndex == rhs.counterPassIndex );
+ }
+
+ bool operator!=( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
+ const void* pNext = {};
+ uint32_t counterPassIndex = {};
+ };
+ static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
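
counterPassIndex ties a submission to one pass of a multi-pass performance query; the pass count would come from vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR. A hedged usage sketch, assuming queue, commandBuffer and numPasses are valid and omitting real synchronization:

    #include <vulkan/vulkan.hpp>

    // Hedged sketch: replay the same work once per counter pass, tagging
    // each submission with the pass it covers via the pNext chain.
    void submitAllPasses( vk::Queue queue, vk::CommandBuffer commandBuffer, uint32_t numPasses )
    {
      for ( uint32_t pass = 0; pass < numPasses; ++pass )
      {
        vk::PerformanceQuerySubmitInfoKHR passInfo;
        passInfo.setCounterPassIndex( pass );

        vk::SubmitInfo submitInfo;
        submitInfo.setPNext( &passInfo );              // chain the per-pass info
        submitInfo.setCommandBufferCount( 1 );
        submitInfo.setPCommandBuffers( &commandBuffer );

        queue.submit( submitInfo, vk::Fence() );       // no fence, for brevity
        queue.waitIdle();                              // crude sync, sketch only
      }
    }
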
+
+ struct PerformanceStreamMarkerInfoINTEL
{
- VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PerformanceStreamMarkerInfoINTEL( marker_ )
+ VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT
+ : marker( marker_ )
{}
+ VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) - offsetof( PerformanceStreamMarkerInfoINTEL, pNext ) );
+ return *this;
+ }
+
PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PerformanceStreamMarkerInfoINTEL( rhs )
- {}
+ {
+ *this = rhs;
+ }
PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PerformanceStreamMarkerInfoINTEL::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>(&rhs);
return *this;
}
@@ -44789,15 +41735,17 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PerformanceStreamMarkerInfoINTEL::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+ const void* pNext = {};
+ uint32_t marker = {};
};
static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
union PerformanceValueDataINTEL
{
- PerformanceValueDataINTEL( uint32_t value32_ = 0 )
+ PerformanceValueDataINTEL( uint32_t value32_ = {} )
{
value32 = value32_;
}
@@ -44835,7 +41783,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PerformanceValueDataINTEL & setValueBool( vk::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
{
valueBool = valueBool_;
return *this;
@@ -44846,6 +41794,13 @@ namespace VULKAN_HPP_NAMESPACE
valueString = valueString_;
return *this;
}
+
+ VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+ return *this;
+ }
+
operator VkPerformanceValueDataINTEL const&() const
{
return *reinterpret_cast<const VkPerformanceValueDataINTEL*>(this);
@@ -44860,7 +41815,7 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t value32;
uint64_t value64;
float valueFloat;
- vk::Bool32 valueBool;
+ VULKAN_HPP_NAMESPACE::Bool32 valueBool;
const char* valueString;
#else
uint32_t value32;
@@ -44873,30 +41828,30 @@ namespace VULKAN_HPP_NAMESPACE
struct PerformanceValueINTEL
{
- PerformanceValueINTEL( vk::PerformanceValueTypeINTEL type_ = vk::PerformanceValueTypeINTEL::eUint32,
- vk::PerformanceValueDataINTEL data_ = vk::PerformanceValueDataINTEL() ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
+ VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
: type( type_ )
, data( data_ )
{}
PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPerformanceValueINTEL*>(this) = rhs;
+ *this = rhs;
}
PerformanceValueINTEL& operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPerformanceValueINTEL*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>(&rhs);
return *this;
}
- PerformanceValueINTEL & setType( vk::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- PerformanceValueINTEL & setData( vk::PerformanceValueDataINTEL data_ ) VULKAN_HPP_NOEXCEPT
+ PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ ) VULKAN_HPP_NOEXCEPT
{
data = data_;
return *this;
@@ -44913,65 +41868,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::PerformanceValueTypeINTEL type;
- vk::PerformanceValueDataINTEL data;
+ VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
+ VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
};
static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
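
PerformanceValueINTEL pairs the PerformanceValueDataINTEL union above with a type tag that names its active member. A hedged sketch of dispatching on that tag, with a hypothetical printValue helper:

    #include <vulkan/vulkan.hpp>
    #include <cstdio>

    // Hedged sketch: the type member selects which union member of data is valid.
    void printValue( vk::PerformanceValueINTEL const & v )
    {
      switch ( v.type )
      {
        case vk::PerformanceValueTypeINTEL::eUint32 : std::printf( "%u\n", v.data.value32 ); break;
        case vk::PerformanceValueTypeINTEL::eUint64 : std::printf( "%llu\n", static_cast<unsigned long long>( v.data.value64 ) ); break;
        case vk::PerformanceValueTypeINTEL::eFloat  : std::printf( "%f\n", static_cast<double>( v.data.valueFloat ) ); break;
        case vk::PerformanceValueTypeINTEL::eBool   : std::printf( "%s\n", v.data.valueBool ? "true" : "false" ); break;
        default /* eString */                       : std::printf( "%s\n", v.data.valueString ); break;
      }
    }
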
- namespace layout
- {
- struct PhysicalDevice16BitStorageFeatures
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( vk::Bool32 storageBuffer16BitAccess_ = 0,
- vk::Bool32 uniformAndStorageBuffer16BitAccess_ = 0,
- vk::Bool32 storagePushConstant16_ = 0,
- vk::Bool32 storageInputOutput16_ = 0 ) VULKAN_HPP_NOEXCEPT
- : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
- , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
- , storagePushConstant16( storagePushConstant16_ )
- , storageInputOutput16( storageInputOutput16_ )
- {}
-
- PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>(this) = rhs;
- }
-
- PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
- void* pNext = nullptr;
- vk::Bool32 storageBuffer16BitAccess;
- vk::Bool32 uniformAndStorageBuffer16BitAccess;
- vk::Bool32 storagePushConstant16;
- vk::Bool32 storageInputOutput16;
- };
- static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDevice16BitStorageFeatures : public layout::PhysicalDevice16BitStorageFeatures
+ struct PhysicalDevice16BitStorageFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( vk::Bool32 storageBuffer16BitAccess_ = 0,
- vk::Bool32 uniformAndStorageBuffer16BitAccess_ = 0,
- vk::Bool32 storagePushConstant16_ = 0,
- vk::Bool32 storageInputOutput16_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevice16BitStorageFeatures( storageBuffer16BitAccess_, uniformAndStorageBuffer16BitAccess_, storagePushConstant16_, storageInputOutput16_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT
+ : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+ , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
+ , storagePushConstant16( storagePushConstant16_ )
+ , storageInputOutput16( storageInputOutput16_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) );
+ return *this;
+ }
+
PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevice16BitStorageFeatures( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDevice16BitStorageFeatures::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>(&rhs);
return *this;
}
@@ -44981,25 +41909,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( vk::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer16BitAccess = storageBuffer16BitAccess_;
return *this;
}
- PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( vk::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
return *this;
}
- PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( vk::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant16 = storagePushConstant16_;
return *this;
}
- PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( vk::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
{
storageInputOutput16 = storageInputOutput16_;
return *this;
@@ -45030,99 +41958,79 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDevice16BitStorageFeatures::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
};
static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDevice8BitStorageFeatures
{
- struct PhysicalDevice8BitStorageFeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeaturesKHR( vk::Bool32 storageBuffer8BitAccess_ = 0,
- vk::Bool32 uniformAndStorageBuffer8BitAccess_ = 0,
- vk::Bool32 storagePushConstant8_ = 0 ) VULKAN_HPP_NOEXCEPT
- : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
- , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
- , storagePushConstant8( storagePushConstant8_ )
- {}
-
- PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 storageBuffer8BitAccess;
- vk::Bool32 uniformAndStorageBuffer8BitAccess;
- vk::Bool32 storagePushConstant8;
- };
- static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDevice8BitStorageFeaturesKHR : public layout::PhysicalDevice8BitStorageFeaturesKHR
- {
- VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeaturesKHR( vk::Bool32 storageBuffer8BitAccess_ = 0,
- vk::Bool32 uniformAndStorageBuffer8BitAccess_ = 0,
- vk::Bool32 storagePushConstant8_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevice8BitStorageFeaturesKHR( storageBuffer8BitAccess_, uniformAndStorageBuffer8BitAccess_, storagePushConstant8_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
+ : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
+ , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
+ , storagePushConstant8( storagePushConstant8_ )
{}
- PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevice8BitStorageFeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) - offsetof( PhysicalDevice8BitStorageFeatures, pNext ) );
+ return *this;
+ }
- PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDevice8BitStorageFeaturesKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ PhysicalDevice8BitStorageFeatures& operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>(&rhs);
return *this;
}
- PhysicalDevice8BitStorageFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice8BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDevice8BitStorageFeaturesKHR & setStorageBuffer8BitAccess( vk::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer8BitAccess = storageBuffer8BitAccess_;
return *this;
}
- PhysicalDevice8BitStorageFeaturesKHR & setUniformAndStorageBuffer8BitAccess( vk::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
return *this;
}
- PhysicalDevice8BitStorageFeaturesKHR & setStoragePushConstant8( vk::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant8 = storagePushConstant8_;
return *this;
}
- operator VkPhysicalDevice8BitStorageFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevice8BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures*>( this );
}
- operator VkPhysicalDevice8BitStorageFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures*>( this );
}
- bool operator==( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -45131,58 +42039,41 @@ namespace VULKAN_HPP_NAMESPACE
&& ( storagePushConstant8 == rhs.storagePushConstant8 );
}
- bool operator!=( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDevice8BitStorageFeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
};
- static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceASTCDecodeFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( vk::Bool32 decodeModeSharedExponent_ = 0 ) VULKAN_HPP_NOEXCEPT
- : decodeModeSharedExponent( decodeModeSharedExponent_ )
- {}
-
- PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 decodeModeSharedExponent;
- };
- static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
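The hunk above shows the shape every wrapper takes after this refactor: the protected layout:: base is gone, the members are public with {} initializers, sType is preset, each setter returns *this, and the conversion operators together with the static_asserts keep the wrapper layout-compatible with the C struct. A minimal usage sketch, not part of this diff, assuming vulkan.hpp is included and VULKAN_HPP_NAMESPACE is the default vk:

    // Build the promoted (non-KHR) feature struct with chained setters;
    // sType is already set by the wrapper's default member initializer.
    vk::PhysicalDevice8BitStorageFeatures features;
    features.setStorageBuffer8BitAccess( VK_TRUE )
            .setUniformAndStorageBuffer8BitAccess( VK_TRUE );

    // Standard layout (asserted above) lets the wrapper pass wherever the
    // C struct is expected, via the conversion operator.
    VkPhysicalDevice8BitStorageFeatures const & cFeatures = features;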
- struct PhysicalDeviceASTCDecodeFeaturesEXT : public layout::PhysicalDeviceASTCDecodeFeaturesEXT
+ struct PhysicalDeviceASTCDecodeFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( vk::Bool32 decodeModeSharedExponent_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceASTCDecodeFeaturesEXT( decodeModeSharedExponent_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT
+ : decodeModeSharedExponent( decodeModeSharedExponent_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceASTCDecodeFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceASTCDecodeFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>(&rhs);
return *this;
}
@@ -45192,7 +42083,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( vk::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
{
decodeModeSharedExponent = decodeModeSharedExponent_;
return *this;
@@ -45220,53 +42111,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceASTCDecodeFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
};
static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( vk::Bool32 advancedBlendCoherentOperations_ = 0 ) VULKAN_HPP_NOEXCEPT
- : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
- {}
-
- PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 advancedBlendCoherentOperations;
- };
- static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT : public layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+ struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( vk::Bool32 advancedBlendCoherentOperations_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT( advancedBlendCoherentOperations_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT
+ : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>(&rhs);
return *this;
}
@@ -45276,7 +42148,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( vk::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
{
advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
return *this;
@@ -45304,57 +42176,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceBlendOperationAdvancedFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
};
static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
- {
- protected:
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
- void* pNext = nullptr;
- uint32_t advancedBlendMaxColorAttachments;
- vk::Bool32 advancedBlendIndependentBlend;
- vk::Bool32 advancedBlendNonPremultipliedSrcColor;
- vk::Bool32 advancedBlendNonPremultipliedDstColor;
- vk::Bool32 advancedBlendCorrelatedOverlap;
- vk::Bool32 advancedBlendAllOperations;
- };
- static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT : public layout::PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+ struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
{
- PhysicalDeviceBlendOperationAdvancedPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceBlendOperationAdvancedPropertiesEXT()
+ PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT
+ : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
+ , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
+ , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
+ , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
+ , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
+ , advancedBlendAllOperations( advancedBlendAllOperations_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceBlendOperationAdvancedPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceBlendOperationAdvancedPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(&rhs);
return *this;
}
@@ -45385,61 +42244,128 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceBlendOperationAdvancedPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+ void* pNext = {};
+ uint32_t advancedBlendMaxColorAttachments = {};
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
+ VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
};
static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceBufferDeviceAddressFeatures
{
- struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
+ : bufferDeviceAddress( bufferDeviceAddress_ )
+ , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+ , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( vk::Bool32 bufferDeviceAddress_ = 0,
- vk::Bool32 bufferDeviceAddressCaptureReplay_ = 0,
- vk::Bool32 bufferDeviceAddressMultiDevice_ = 0 ) VULKAN_HPP_NOEXCEPT
- : bufferDeviceAddress( bufferDeviceAddress_ )
- , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
- , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) - offsetof( PhysicalDeviceBufferDeviceAddressFeatures, pNext ) );
+ return *this;
+ }
- PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>(this) = rhs;
- }
+ PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>(this) = rhs;
- return *this;
- }
+ PhysicalDeviceBufferDeviceAddressFeatures& operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>(&rhs);
+ return *this;
+ }
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 bufferDeviceAddress;
- vk::Bool32 bufferDeviceAddressCaptureReplay;
- vk::Bool32 bufferDeviceAddressMultiDevice;
- };
- static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
+ PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
- struct PhysicalDeviceBufferDeviceAddressFeaturesEXT : public layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT
+ PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferDeviceAddress = bufferDeviceAddress_;
+ return *this;
+ }
+
+ PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+ return *this;
+ }
+
+ PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+ return *this;
+ }
+
+ operator VkPhysicalDeviceBufferDeviceAddressFeatures const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
+ }
+
+ operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
+ }
+
+ bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+ && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+ && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+ }
+
+ bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+ };
+ static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeatures>::value, "struct wrapper is not a standard layout!" );
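PhysicalDeviceBufferDeviceAddressFeatures above is added outright by this update (the Vulkan 1.2 promotion of the EXT struct that follows). Because the wrapper presets sType, it can be dropped straight into a pNext chain for a feature query. A hedged sketch, assuming a Vulkan 1.2 capable vk::PhysicalDevice handle named gpu obtained elsewhere:

    // Chain the feature struct behind PhysicalDeviceFeatures2 and query it.
    vk::PhysicalDeviceBufferDeviceAddressFeatures bdaFeatures;
    vk::PhysicalDeviceFeatures2 features2;
    features2.pNext = &bdaFeatures;   // sType fields are preset by the wrappers
    gpu.getFeatures2( &features2 );   // fills both structs
    bool hasBufferDeviceAddress = ( bdaFeatures.bufferDeviceAddress == VK_TRUE );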
+
+ struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( vk::Bool32 bufferDeviceAddress_ = 0,
- vk::Bool32 bufferDeviceAddressCaptureReplay_ = 0,
- vk::Bool32 bufferDeviceAddressMultiDevice_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT( bufferDeviceAddress_, bufferDeviceAddressCaptureReplay_, bufferDeviceAddressMultiDevice_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
+ : bufferDeviceAddress( bufferDeviceAddress_ )
+ , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+ , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>(&rhs);
return *this;
}
@@ -45449,19 +42375,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( vk::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddress = bufferDeviceAddress_;
return *this;
}
- PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( vk::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
return *this;
}
- PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( vk::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
return *this;
@@ -45491,53 +42417,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceBufferDeviceAddressFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
};
static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceCoherentMemoryFeaturesAMD
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( vk::Bool32 deviceCoherentMemory_ = 0 ) VULKAN_HPP_NOEXCEPT
- : deviceCoherentMemory( deviceCoherentMemory_ )
- {}
-
- PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>(this) = rhs;
- }
-
- PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
- void* pNext = nullptr;
- vk::Bool32 deviceCoherentMemory;
- };
- static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceCoherentMemoryFeaturesAMD : public layout::PhysicalDeviceCoherentMemoryFeaturesAMD
+ struct PhysicalDeviceCoherentMemoryFeaturesAMD
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( vk::Bool32 deviceCoherentMemory_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCoherentMemoryFeaturesAMD( deviceCoherentMemory_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
+ : deviceCoherentMemory( deviceCoherentMemory_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) - offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCoherentMemoryFeaturesAMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceCoherentMemoryFeaturesAMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs);
return *this;
}
@@ -45547,7 +42456,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( vk::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
{
deviceCoherentMemory = deviceCoherentMemory_;
return *this;
@@ -45575,57 +42484,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceCoherentMemoryFeaturesAMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
};
static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
{
- struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( vk::Bool32 computeDerivativeGroupQuads_ = 0,
- vk::Bool32 computeDerivativeGroupLinear_ = 0 ) VULKAN_HPP_NOEXCEPT
- : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
- , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
- {}
-
- PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 computeDerivativeGroupQuads;
- vk::Bool32 computeDerivativeGroupLinear;
- };
- static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceComputeShaderDerivativesFeaturesNV : public layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( vk::Bool32 computeDerivativeGroupQuads_ = 0,
- vk::Bool32 computeDerivativeGroupLinear_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV( computeDerivativeGroupQuads_, computeDerivativeGroupLinear_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT
+ : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
+ , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>(&rhs);
return *this;
}
@@ -45635,13 +42523,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( vk::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
{
computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
return *this;
}
- PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( vk::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
{
computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
return *this;
@@ -45670,57 +42558,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceComputeShaderDerivativesFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
+ VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
};
static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceConditionalRenderingFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( vk::Bool32 conditionalRendering_ = 0,
- vk::Bool32 inheritedConditionalRendering_ = 0 ) VULKAN_HPP_NOEXCEPT
- : conditionalRendering( conditionalRendering_ )
- , inheritedConditionalRendering( inheritedConditionalRendering_ )
- {}
-
- PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 conditionalRendering;
- vk::Bool32 inheritedConditionalRendering;
- };
- static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceConditionalRenderingFeaturesEXT : public layout::PhysicalDeviceConditionalRenderingFeaturesEXT
+ struct PhysicalDeviceConditionalRenderingFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( vk::Bool32 conditionalRendering_ = 0,
- vk::Bool32 inheritedConditionalRendering_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceConditionalRenderingFeaturesEXT( conditionalRendering_, inheritedConditionalRendering_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT
+ : conditionalRendering( conditionalRendering_ )
+ , inheritedConditionalRendering( inheritedConditionalRendering_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) - offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceConditionalRenderingFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceConditionalRenderingFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>(&rhs);
return *this;
}
@@ -45730,13 +42598,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( vk::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
{
conditionalRendering = conditionalRendering_;
return *this;
}
- PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( vk::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
{
inheritedConditionalRendering = inheritedConditionalRendering_;
return *this;
@@ -45765,60 +42633,51 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceConditionalRenderingFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
+ VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
};
static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceConservativeRasterizationPropertiesEXT
{
- struct PhysicalDeviceConservativeRasterizationPropertiesEXT
- {
- protected:
- PhysicalDeviceConservativeRasterizationPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
- void* pNext = nullptr;
- float primitiveOverestimationSize;
- float maxExtraPrimitiveOverestimationSize;
- float extraPrimitiveOverestimationSizeGranularity;
- vk::Bool32 primitiveUnderestimation;
- vk::Bool32 conservativePointAndLineRasterization;
- vk::Bool32 degenerateTrianglesRasterized;
- vk::Bool32 degenerateLinesRasterized;
- vk::Bool32 fullyCoveredFragmentShaderInputVariable;
- vk::Bool32 conservativeRasterizationPostDepthCoverage;
- };
- static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceConservativeRasterizationPropertiesEXT : public layout::PhysicalDeviceConservativeRasterizationPropertiesEXT
- {
- PhysicalDeviceConservativeRasterizationPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceConservativeRasterizationPropertiesEXT()
+ PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {},
+ float maxExtraPrimitiveOverestimationSize_ = {},
+ float extraPrimitiveOverestimationSizeGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT
+ : primitiveOverestimationSize( primitiveOverestimationSize_ )
+ , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
+ , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
+ , primitiveUnderestimation( primitiveUnderestimation_ )
+ , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
+ , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
+ , degenerateLinesRasterized( degenerateLinesRasterized_ )
+ , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
+ , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceConservativeRasterizationPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceConservativeRasterizationPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>(&rhs);
return *this;
}
@@ -45852,57 +42711,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceConservativeRasterizationPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+ void* pNext = {};
+ float primitiveOverestimationSize = {};
+ float maxExtraPrimitiveOverestimationSize = {};
+ float extraPrimitiveOverestimationSizeGranularity = {};
+ VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
+ VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
+ VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
+ VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
+ VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
};
static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceCooperativeMatrixFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( vk::Bool32 cooperativeMatrix_ = 0,
- vk::Bool32 cooperativeMatrixRobustBufferAccess_ = 0 ) VULKAN_HPP_NOEXCEPT
- : cooperativeMatrix( cooperativeMatrix_ )
- , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
- {}
-
- PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 cooperativeMatrix;
- vk::Bool32 cooperativeMatrixRobustBufferAccess;
- };
- static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceCooperativeMatrixFeaturesNV : public layout::PhysicalDeviceCooperativeMatrixFeaturesNV
+ struct PhysicalDeviceCooperativeMatrixFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( vk::Bool32 cooperativeMatrix_ = 0,
- vk::Bool32 cooperativeMatrixRobustBufferAccess_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCooperativeMatrixFeaturesNV( cooperativeMatrix_, cooperativeMatrixRobustBufferAccess_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT
+ : cooperativeMatrix( cooperativeMatrix_ )
+ , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) - offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCooperativeMatrixFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceCooperativeMatrixFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>(&rhs);
return *this;
}
@@ -45912,13 +42758,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( vk::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrix = cooperativeMatrix_;
return *this;
}
- PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( vk::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
{
cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
return *this;
@@ -45947,52 +42793,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceCooperativeMatrixFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
+ VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
};
static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceCooperativeMatrixPropertiesNV
- {
- protected:
- PhysicalDeviceCooperativeMatrixPropertiesNV() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>(this) = rhs;
- }
-
- PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
- void* pNext = nullptr;
- vk::ShaderStageFlags cooperativeMatrixSupportedStages;
- };
- static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceCooperativeMatrixPropertiesNV : public layout::PhysicalDeviceCooperativeMatrixPropertiesNV
+ struct PhysicalDeviceCooperativeMatrixPropertiesNV
{
- PhysicalDeviceCooperativeMatrixPropertiesNV() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCooperativeMatrixPropertiesNV()
+ PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT
+ : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCooperativeMatrixPropertiesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceCooperativeMatrixPropertiesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>(&rhs);
return *this;
}
@@ -46018,53 +42847,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceCooperativeMatrixPropertiesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
};
static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceCornerSampledImageFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( vk::Bool32 cornerSampledImage_ = 0 ) VULKAN_HPP_NOEXCEPT
- : cornerSampledImage( cornerSampledImage_ )
- {}
-
- PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 cornerSampledImage;
- };
- static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceCornerSampledImageFeaturesNV : public layout::PhysicalDeviceCornerSampledImageFeaturesNV
+ struct PhysicalDeviceCornerSampledImageFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( vk::Bool32 cornerSampledImage_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCornerSampledImageFeaturesNV( cornerSampledImage_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT
+ : cornerSampledImage( cornerSampledImage_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) - offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCornerSampledImageFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceCornerSampledImageFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>(&rhs);
return *this;
}
@@ -46074,7 +42884,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( vk::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
{
cornerSampledImage = cornerSampledImage_;
return *this;
@@ -46102,53 +42912,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceCornerSampledImageFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
};
static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceCoverageReductionModeFeaturesNV
{
- struct PhysicalDeviceCoverageReductionModeFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( vk::Bool32 coverageReductionMode_ = 0 ) VULKAN_HPP_NOEXCEPT
- : coverageReductionMode( coverageReductionMode_ )
- {}
-
- PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 coverageReductionMode;
- };
- static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceCoverageReductionModeFeaturesNV : public layout::PhysicalDeviceCoverageReductionModeFeaturesNV
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( vk::Bool32 coverageReductionMode_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCoverageReductionModeFeaturesNV( coverageReductionMode_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT
+ : coverageReductionMode( coverageReductionMode_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) - offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceCoverageReductionModeFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceCoverageReductionModeFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>(&rhs);
return *this;
}
@@ -46158,7 +42949,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( vk::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageReductionMode = coverageReductionMode_;
return *this;
@@ -46186,53 +42977,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceCoverageReductionModeFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
};
static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
{
- struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( vk::Bool32 dedicatedAllocationImageAliasing_ = 0 ) VULKAN_HPP_NOEXCEPT
- : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
- {}
-
- PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 dedicatedAllocationImageAliasing;
- };
- static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : public layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( vk::Bool32 dedicatedAllocationImageAliasing_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( dedicatedAllocationImageAliasing_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT
+ : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(&rhs);
return *this;
}
@@ -46242,7 +43014,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( vk::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
{
dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
return *this;
@@ -46270,53 +43042,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
};
static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceDepthClipEnableFeaturesEXT
{
- struct PhysicalDeviceDepthClipEnableFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( vk::Bool32 depthClipEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : depthClipEnable( depthClipEnable_ )
- {}
-
- PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 depthClipEnable;
- };
- static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceDepthClipEnableFeaturesEXT : public layout::PhysicalDeviceDepthClipEnableFeaturesEXT
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( vk::Bool32 depthClipEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDepthClipEnableFeaturesEXT( depthClipEnable_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+ : depthClipEnable( depthClipEnable_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) - offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDepthClipEnableFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceDepthClipEnableFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>(&rhs);
return *this;
}
@@ -46326,7 +43079,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( vk::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthClipEnable = depthClipEnable_;
return *this;
@@ -46354,69 +43107,54 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceDepthClipEnableFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
};
static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
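    // [Editor's note, hedged] The generated assignment now byte-copies from &pNext
    // to the end of the struct instead of assigning whole objects: sType became a
    // const data member in this diff, so the defaulted operator= is deleted and a
    // whole-struct copy would clobber it. Copying
    //   sizeof( Struct ) - offsetof( Struct, pNext )
    // bytes starting at pNext covers every payload member while leaving the const
    // sType untouched. The same pattern written generically (illustration only;
    // offsetof is valid here because the static_asserts above guarantee standard
    // layout):
    //
    //   template <typename T>
    //   void copyAfterSType( T & dst, T const & src ) VULKAN_HPP_NOEXCEPT
    //   {
    //     memcpy( &dst.pNext, &src.pNext, sizeof( T ) - offsetof( T, pNext ) );
    //   }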
- namespace layout
- {
- struct PhysicalDeviceDepthStencilResolvePropertiesKHR
- {
- protected:
- PhysicalDeviceDepthStencilResolvePropertiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceDepthStencilResolvePropertiesKHR( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceDepthStencilResolvePropertiesKHR& operator=( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR;
- void* pNext = nullptr;
- vk::ResolveModeFlagsKHR supportedDepthResolveModes;
- vk::ResolveModeFlagsKHR supportedStencilResolveModes;
- vk::Bool32 independentResolveNone;
- vk::Bool32 independentResolve;
- };
- static_assert( sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) == sizeof( VkPhysicalDeviceDepthStencilResolvePropertiesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceDepthStencilResolvePropertiesKHR : public layout::PhysicalDeviceDepthStencilResolvePropertiesKHR
+ struct PhysicalDeviceDepthStencilResolveProperties
{
- PhysicalDeviceDepthStencilResolvePropertiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDepthStencilResolvePropertiesKHR()
+ PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT
+ : supportedDepthResolveModes( supportedDepthResolveModes_ )
+ , supportedStencilResolveModes( supportedStencilResolveModes_ )
+ , independentResolveNone( independentResolveNone_ )
+ , independentResolve( independentResolve_ )
{}
- PhysicalDeviceDepthStencilResolvePropertiesKHR( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDepthStencilResolvePropertiesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) - offsetof( PhysicalDeviceDepthStencilResolveProperties, pNext ) );
+ return *this;
+ }
- PhysicalDeviceDepthStencilResolvePropertiesKHR& operator=( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceDepthStencilResolvePropertiesKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ PhysicalDeviceDepthStencilResolveProperties& operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>(&rhs);
return *this;
}
- operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDepthStencilResolveProperties const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
}
- operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
}
- bool operator==( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -46426,274 +43164,220 @@ namespace VULKAN_HPP_NAMESPACE
&& ( independentResolve == rhs.independentResolve );
}
- bool operator!=( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceDepthStencilResolvePropertiesKHR::sType;
- };
- static_assert( sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) == sizeof( VkPhysicalDeviceDepthStencilResolvePropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolvePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceDescriptorIndexingFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeaturesEXT( vk::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0,
- vk::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0,
- vk::Bool32 descriptorBindingPartiallyBound_ = 0,
- vk::Bool32 descriptorBindingVariableDescriptorCount_ = 0,
- vk::Bool32 runtimeDescriptorArray_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
- , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
- , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
- , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
- , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
- , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
- , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
- , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
- , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
- , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
- , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
- , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
- , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
- , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
- , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
- , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
- , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
- , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
- , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
- , runtimeDescriptorArray( runtimeDescriptorArray_ )
- {}
-
- PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 shaderInputAttachmentArrayDynamicIndexing;
- vk::Bool32 shaderUniformTexelBufferArrayDynamicIndexing;
- vk::Bool32 shaderStorageTexelBufferArrayDynamicIndexing;
- vk::Bool32 shaderUniformBufferArrayNonUniformIndexing;
- vk::Bool32 shaderSampledImageArrayNonUniformIndexing;
- vk::Bool32 shaderStorageBufferArrayNonUniformIndexing;
- vk::Bool32 shaderStorageImageArrayNonUniformIndexing;
- vk::Bool32 shaderInputAttachmentArrayNonUniformIndexing;
- vk::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing;
- vk::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing;
- vk::Bool32 descriptorBindingUniformBufferUpdateAfterBind;
- vk::Bool32 descriptorBindingSampledImageUpdateAfterBind;
- vk::Bool32 descriptorBindingStorageImageUpdateAfterBind;
- vk::Bool32 descriptorBindingStorageBufferUpdateAfterBind;
- vk::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
- vk::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
- vk::Bool32 descriptorBindingUpdateUnusedWhilePending;
- vk::Bool32 descriptorBindingPartiallyBound;
- vk::Bool32 descriptorBindingVariableDescriptorCount;
- vk::Bool32 runtimeDescriptorArray;
- };
- static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceDescriptorIndexingFeaturesEXT : public layout::PhysicalDeviceDescriptorIndexingFeaturesEXT
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeaturesEXT( vk::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = 0,
- vk::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderSampledImageArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderStorageImageArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = 0,
- vk::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = 0,
- vk::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = 0,
- vk::Bool32 descriptorBindingUpdateUnusedWhilePending_ = 0,
- vk::Bool32 descriptorBindingPartiallyBound_ = 0,
- vk::Bool32 descriptorBindingVariableDescriptorCount_ = 0,
- vk::Bool32 runtimeDescriptorArray_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDescriptorIndexingFeaturesEXT( shaderInputAttachmentArrayDynamicIndexing_, shaderUniformTexelBufferArrayDynamicIndexing_, shaderStorageTexelBufferArrayDynamicIndexing_, shaderUniformBufferArrayNonUniformIndexing_, shaderSampledImageArrayNonUniformIndexing_, shaderStorageBufferArrayNonUniformIndexing_, shaderStorageImageArrayNonUniformIndexing_, shaderInputAttachmentArrayNonUniformIndexing_, shaderUniformTexelBufferArrayNonUniformIndexing_, shaderStorageTexelBufferArrayNonUniformIndexing_, descriptorBindingUniformBufferUpdateAfterBind_, descriptorBindingSampledImageUpdateAfterBind_, descriptorBindingStorageImageUpdateAfterBind_, descriptorBindingStorageBufferUpdateAfterBind_, descriptorBindingUniformTexelBufferUpdateAfterBind_, descriptorBindingStorageTexelBufferUpdateAfterBind_, descriptorBindingUpdateUnusedWhilePending_, descriptorBindingPartiallyBound_, descriptorBindingVariableDescriptorCount_, runtimeDescriptorArray_ )
- {}
-
- PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDescriptorIndexingFeaturesEXT( rhs )
- {}
-
- PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- layout::PhysicalDeviceDescriptorIndexingFeaturesEXT::operator=(rhs);
- return *this;
- }
-
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+ };
+ static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolveProperties>::value, "struct wrapper is not a standard layout!" );
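    // [Editor's sketch] The KHR suffix disappears here because
    // VK_KHR_depth_stencil_resolve was promoted to core in Vulkan 1.2; properties
    // structs are output-only and get filled through VkPhysicalDeviceProperties2.
    // Hypothetical usage, assuming an existing vk::PhysicalDevice:
    //
    //   vk::PhysicalDeviceDepthStencilResolveProperties resolveProps;
    //   vk::PhysicalDeviceProperties2 props2;
    //   props2.pNext = &resolveProps;
    //   physicalDevice.getProperties2( &props2 );
    //   bool canResolveToSampleZero = static_cast<bool>(
    //     resolveProps.supportedDepthResolveModes & vk::ResolveModeFlagBits::eSampleZero );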
+
+ struct PhysicalDeviceDescriptorIndexingFeatures
+ {
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
+ , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
+ , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
+ , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
+ , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
+ , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
+ , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
+ , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
+ , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
+ , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
+ , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
+ , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
+ , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
+ , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
+ , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
+ , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
+ , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
+ , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
+ , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
+ , runtimeDescriptorArray( runtimeDescriptorArray_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) - offsetof( PhysicalDeviceDescriptorIndexingFeatures, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeatures& operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>(&rhs);
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayDynamicIndexing( vk::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayDynamicIndexing( vk::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayDynamicIndexing( vk::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformBufferArrayNonUniformIndexing( vk::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderSampledImageArrayNonUniformIndexing( vk::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageBufferArrayNonUniformIndexing( vk::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageImageArrayNonUniformIndexing( vk::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayNonUniformIndexing( vk::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayNonUniformIndexing( vk::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayNonUniformIndexing( vk::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformBufferUpdateAfterBind( vk::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingSampledImageUpdateAfterBind( vk::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageImageUpdateAfterBind( vk::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageBufferUpdateAfterBind( vk::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformTexelBufferUpdateAfterBind( vk::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageTexelBufferUpdateAfterBind( vk::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUpdateUnusedWhilePending( vk::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingPartiallyBound( vk::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingVariableDescriptorCount( vk::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
return *this;
}
- PhysicalDeviceDescriptorIndexingFeaturesEXT & setRuntimeDescriptorArray( vk::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
{
runtimeDescriptorArray = runtimeDescriptorArray_;
return *this;
}
- operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDescriptorIndexingFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
}
- operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
}
- bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -46719,93 +43403,116 @@ namespace VULKAN_HPP_NAMESPACE
&& ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
}
- bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceDescriptorIndexingFeaturesEXT::sType;
- };
- static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceDescriptorIndexingPropertiesEXT
- {
- protected:
- PhysicalDeviceDescriptorIndexingPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceDescriptorIndexingPropertiesEXT( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceDescriptorIndexingPropertiesEXT& operator=( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT;
- void* pNext = nullptr;
- uint32_t maxUpdateAfterBindDescriptorsInAllPools;
- vk::Bool32 shaderUniformBufferArrayNonUniformIndexingNative;
- vk::Bool32 shaderSampledImageArrayNonUniformIndexingNative;
- vk::Bool32 shaderStorageBufferArrayNonUniformIndexingNative;
- vk::Bool32 shaderStorageImageArrayNonUniformIndexingNative;
- vk::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative;
- vk::Bool32 robustBufferAccessUpdateAfterBind;
- vk::Bool32 quadDivergentImplicitLod;
- uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
- uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
- uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
- uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
- uint32_t maxPerStageUpdateAfterBindResources;
- uint32_t maxDescriptorSetUpdateAfterBindSamplers;
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
- uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
- uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
- uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
- };
- static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceDescriptorIndexingPropertiesEXT : public layout::PhysicalDeviceDescriptorIndexingPropertiesEXT
- {
- PhysicalDeviceDescriptorIndexingPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDescriptorIndexingPropertiesEXT()
- {}
-
- PhysicalDeviceDescriptorIndexingPropertiesEXT( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDescriptorIndexingPropertiesEXT( rhs )
- {}
-
- PhysicalDeviceDescriptorIndexingPropertiesEXT& operator=( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- layout::PhysicalDeviceDescriptorIndexingPropertiesEXT::operator=(rhs);
- return *this;
- }
-
- operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>( this );
- }
-
- operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT &() VULKAN_HPP_NOEXCEPT
- {
- return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>( this );
- }
-
- bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+ VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+ };
+ static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeatures>::value, "struct wrapper is not a standard layout!" );
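    // [Editor's sketch] Feature structs work in both directions: filled by
    // getFeatures2 when querying, or chained into vk::DeviceCreateInfo::pNext to
    // request the bits at device creation. The setters kept by this diff chain
    // naturally, since each returns *this:
    //
    //   vk::PhysicalDeviceDescriptorIndexingFeatures indexing;
    //   indexing.setRuntimeDescriptorArray( VK_TRUE )
    //           .setDescriptorBindingPartiallyBound( VK_TRUE );
    //   vk::DeviceCreateInfo createInfo;
    //   createInfo.pNext = &indexing;   // enabled alongside the core features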
+
+ struct PhysicalDeviceDescriptorIndexingProperties
+ {
+ PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+ uint32_t maxPerStageUpdateAfterBindResources_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
+ , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
+ , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
+ , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
+ , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
+ , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
+ , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
+ , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
+ , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
+ , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
+ , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
+ , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
+ , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
+ , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
+ , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
+ , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
+ , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
+ , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
+ , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
+ , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
+ , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
+ , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
+ , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) - offsetof( PhysicalDeviceDescriptorIndexingProperties, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ PhysicalDeviceDescriptorIndexingProperties& operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>(&rhs);
+ return *this;
+ }
+
+ operator VkPhysicalDeviceDescriptorIndexingProperties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties*>( this );
+ }
+
+ operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties*>( this );
+ }
+
+ bool operator==( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -46834,57 +43541,61 @@ namespace VULKAN_HPP_NAMESPACE
&& ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
}
- bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceDescriptorIndexingPropertiesEXT::sType;
- };
- static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceDiscardRectanglePropertiesEXT
- {
- protected:
- PhysicalDeviceDiscardRectanglePropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
- void* pNext = nullptr;
- uint32_t maxDiscardRectangles;
- };
- static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceDiscardRectanglePropertiesEXT : public layout::PhysicalDeviceDiscardRectanglePropertiesEXT
- {
- PhysicalDeviceDiscardRectanglePropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDiscardRectanglePropertiesEXT()
- {}
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
+ void* pNext = {};
+ uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+ uint32_t maxPerStageUpdateAfterBindResources = {};
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+ };
+ static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingProperties>::value, "struct wrapper is not a standard layout!" );
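    // [Editor's note, hedged] Properties structs like this one carry no setters:
    // they are driver-filled outputs. The paired static_asserts are what make the
    // reinterpret_cast conversions above workable in practice: equal size plus
    // standard layout lets the C++ wrapper alias the C struct byte-for-byte, e.g.
    //
    //   VkPhysicalDeviceDescriptorIndexingProperties raw{};   // filled by the C API
    //   auto const & wrapped =
    //     *reinterpret_cast<vk::PhysicalDeviceDescriptorIndexingProperties const *>( &raw );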
+
+ struct PhysicalDeviceDiscardRectanglePropertiesEXT
+ {
+ PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxDiscardRectangles( maxDiscardRectangles_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) - offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) );
+ return *this;
+ }
PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDiscardRectanglePropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceDiscardRectanglePropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>(&rhs);
return *this;
}
@@ -46910,130 +43621,102 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceDiscardRectanglePropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+ void* pNext = {};
+ uint32_t maxDiscardRectangles = {};
};
static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
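For reference, the flattened wrappers above are consumed by chaining them through pNext when querying device properties. A minimal sketch of that pattern (the helper name and the physicalDevice handle are illustrative assumptions; requires Vulkan 1.1 and a device exposing VK_EXT_discard_rectangle):

  #include <vulkan/vulkan.hpp>

  uint32_t queryMaxDiscardRectangles( vk::PhysicalDevice physicalDevice )  // assumed valid handle
  {
    // Chain the extension struct into the core query via pNext;
    // sType is already set by the wrapper's in-class initializer.
    vk::PhysicalDeviceDiscardRectanglePropertiesEXT discardRectangleProperties;
    vk::PhysicalDeviceProperties2 properties2;
    properties2.pNext = &discardRectangleProperties;

    physicalDevice.getProperties2( &properties2 );  // fills every struct in the chain
    return discardRectangleProperties.maxDiscardRectangles;
  }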
- namespace layout
+ struct PhysicalDeviceDriverProperties
{
- struct PhysicalDeviceDriverPropertiesKHR
+ PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+ std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {},
+ std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {},
+ VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+ : driverID( driverID_ )
+ , driverName{}
+ , driverInfo{}
+ , conformanceVersion( conformanceVersion_ )
{
- protected:
- PhysicalDeviceDriverPropertiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceDriverPropertiesKHR( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceDriverPropertiesKHR& operator=( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceDriverPropertiesKHR;
- void* pNext = nullptr;
- vk::DriverIdKHR driverID;
- char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR];
- char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR];
- vk::ConformanceVersionKHR conformanceVersion;
- };
- static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE,VK_MAX_DRIVER_NAME_SIZE>::copy( driverName, driverName_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE,VK_MAX_DRIVER_INFO_SIZE>::copy( driverInfo, driverInfo_ );
+ }
- struct PhysicalDeviceDriverPropertiesKHR : public layout::PhysicalDeviceDriverPropertiesKHR
- {
- PhysicalDeviceDriverPropertiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDriverPropertiesKHR()
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) - offsetof( PhysicalDeviceDriverProperties, pNext ) );
+ return *this;
+ }
- PhysicalDeviceDriverPropertiesKHR( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceDriverPropertiesKHR( rhs )
- {}
+ PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceDriverPropertiesKHR& operator=( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceDriverProperties& operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceDriverPropertiesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>(&rhs);
return *this;
}
- operator VkPhysicalDeviceDriverPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDriverProperties const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceDriverPropertiesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
}
- operator VkPhysicalDeviceDriverPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
}
- bool operator==( PhysicalDeviceDriverPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( driverID == rhs.driverID )
- && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE_KHR * sizeof( char ) ) == 0 )
- && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR * sizeof( char ) ) == 0 )
+ && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) ) == 0 )
+ && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) ) == 0 )
&& ( conformanceVersion == rhs.conformanceVersion );
}
- bool operator!=( PhysicalDeviceDriverPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceDriverPropertiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+ char driverName[VK_MAX_DRIVER_NAME_SIZE] = {};
+ char driverInfo[VK_MAX_DRIVER_INFO_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
};
- static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceDriverPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceExclusiveScissorFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( vk::Bool32 exclusiveScissor_ = 0 ) VULKAN_HPP_NOEXCEPT
- : exclusiveScissor( exclusiveScissor_ )
- {}
-
- PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>(this) = rhs;
- return *this;
- }
+ static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
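Since driverName and driverInfo remain fixed-size char arrays (compared with memcmp in operator== above), callers usually copy them out as strings after the query. A sketch under the same assumptions, plus a Vulkan 1.2 device (the non-KHR struct is the promoted form):

  #include <string>
  #include <vulkan/vulkan.hpp>

  std::string queryDriverName( vk::PhysicalDevice physicalDevice )  // helper name and handle are illustrative
  {
    vk::PhysicalDeviceDriverProperties driverProperties;
    vk::PhysicalDeviceProperties2 properties2;
    properties2.pNext = &driverProperties;
    physicalDevice.getProperties2( &properties2 );

    // driverName is a null-terminated char[VK_MAX_DRIVER_NAME_SIZE].
    return std::string( driverProperties.driverName );
  }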
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 exclusiveScissor;
- };
- static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceExclusiveScissorFeaturesNV : public layout::PhysicalDeviceExclusiveScissorFeaturesNV
+ struct PhysicalDeviceExclusiveScissorFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( vk::Bool32 exclusiveScissor_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExclusiveScissorFeaturesNV( exclusiveScissor_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) VULKAN_HPP_NOEXCEPT
+ : exclusiveScissor( exclusiveScissor_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) - offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExclusiveScissorFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceExclusiveScissorFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>(&rhs);
return *this;
}
@@ -47043,7 +43726,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( vk::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
{
exclusiveScissor = exclusiveScissor_;
return *this;
@@ -47071,61 +43754,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceExclusiveScissorFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
};
static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
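Feature wrappers double as inputs at device creation; the fluent setters exist for that direction. A sketch of requesting the feature (illustrative helper; assumes VK_NV_exclusive_scissor is supported and the create info's queue data is filled in elsewhere):

  #include <vulkan/vulkan.hpp>

  vk::Device createDeviceWithExclusiveScissor( vk::PhysicalDevice physicalDevice,
                                               vk::DeviceCreateInfo deviceCreateInfo )
  {
    // Request the feature by chaining the struct into DeviceCreateInfo::pNext.
    vk::PhysicalDeviceExclusiveScissorFeaturesNV exclusiveScissorFeatures;
    exclusiveScissorFeatures.setExclusiveScissor( VK_TRUE );

    deviceCreateInfo.setPNext( &exclusiveScissorFeatures );
    return physicalDevice.createDevice( deviceCreateInfo );
  }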
- namespace layout
+ struct PhysicalDeviceExternalBufferInfo
{
- struct PhysicalDeviceExternalBufferInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( vk::BufferCreateFlags flags_ = vk::BufferCreateFlags(),
- vk::BufferUsageFlags usage_ = vk::BufferUsageFlags(),
- vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , usage( usage_ )
- , handleType( handleType_ )
- {}
-
- PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>(this) = rhs;
- }
-
- PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
- const void* pNext = nullptr;
- vk::BufferCreateFlags flags;
- vk::BufferUsageFlags usage;
- vk::ExternalMemoryHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceExternalBufferInfo : public layout::PhysicalDeviceExternalBufferInfo
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( vk::BufferCreateFlags flags_ = vk::BufferCreateFlags(),
- vk::BufferUsageFlags usage_ = vk::BufferUsageFlags(),
- vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalBufferInfo( flags_, usage_, handleType_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , usage( usage_ )
+ , handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalBufferInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceExternalBufferInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>(&rhs);
return *this;
}
@@ -47135,19 +43795,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceExternalBufferInfo & setFlags( vk::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PhysicalDeviceExternalBufferInfo & setUsage( vk::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- PhysicalDeviceExternalBufferInfo & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -47169,7 +43829,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( usage == rhs.usage )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -47177,53 +43837,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceExternalBufferInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
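The setters return the struct by reference, so they chain directly on a temporary. A sketch of an external-memory capability check (illustrative helper; Vulkan 1.1 core entry point):

  #include <vulkan/vulkan.hpp>

  bool supportsOpaqueFdExport( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceExternalBufferInfo externalBufferInfo = vk::PhysicalDeviceExternalBufferInfo()
      .setUsage( vk::BufferUsageFlagBits::eTransferSrc )
      .setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );

    vk::ExternalBufferProperties externalBufferProperties =
      physicalDevice.getExternalBufferProperties( externalBufferInfo );

    // Exportable as an opaque POSIX fd?
    return static_cast<bool>( externalBufferProperties.externalMemoryProperties.externalMemoryFeatures
                              & vk::ExternalMemoryFeatureFlagBits::eExportable );
  }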
- namespace layout
+ struct PhysicalDeviceExternalFenceInfo
{
- struct PhysicalDeviceExternalFenceInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- {}
-
- PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>(this) = rhs;
- }
-
- PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
- const void* pNext = nullptr;
- vk::ExternalFenceHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceExternalFenceInfo : public layout::PhysicalDeviceExternalFenceInfo
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( vk::ExternalFenceHandleTypeFlagBits handleType_ = vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalFenceInfo( handleType_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalFenceInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceExternalFenceInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>(&rhs);
return *this;
}
@@ -47233,7 +43876,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceExternalFenceInfo & setHandleType( vk::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -47253,7 +43896,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -47261,53 +43904,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceExternalFenceInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceExternalImageFormatInfo
{
- struct PhysicalDeviceExternalImageFormatInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- {}
-
- PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>(this) = rhs;
- }
-
- PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
- const void* pNext = nullptr;
- vk::ExternalMemoryHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceExternalImageFormatInfo : public layout::PhysicalDeviceExternalImageFormatInfo
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( vk::ExternalMemoryHandleTypeFlagBits handleType_ = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalImageFormatInfo( handleType_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) - offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalImageFormatInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceExternalImageFormatInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>(&rhs);
return *this;
}
@@ -47317,7 +43941,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceExternalImageFormatInfo & setHandleType( vk::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -47337,7 +43961,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -47345,52 +43969,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceExternalImageFormatInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
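The same pattern extends image-format queries: the info wrapper chains into PhysicalDeviceImageFormatInfo2 and the matching output struct into ImageFormatProperties2. A sketch (illustrative helper; Vulkan 1.1):

  #include <vulkan/vulkan.hpp>

  vk::Result queryExternalImageFormat( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceExternalImageFormatInfo externalImageFormatInfo;
    externalImageFormatInfo.setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );

    vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo;
    imageFormatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
                   .setType( vk::ImageType::e2D )
                   .setTiling( vk::ImageTiling::eOptimal )
                   .setUsage( vk::ImageUsageFlagBits::eSampled )
                   .setPNext( &externalImageFormatInfo );

    vk::ExternalImageFormatProperties externalImageFormatProperties;
    vk::ImageFormatProperties2 imageFormatProperties;
    imageFormatProperties.pNext = &externalImageFormatProperties;

    // Returns eErrorFormatNotSupported if the combination is unsupported.
    return physicalDevice.getImageFormatProperties2( &imageFormatInfo, &imageFormatProperties );
  }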
- namespace layout
- {
- struct PhysicalDeviceExternalMemoryHostPropertiesEXT
- {
- protected:
- PhysicalDeviceExternalMemoryHostPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
- void* pNext = nullptr;
- vk::DeviceSize minImportedHostPointerAlignment;
- };
- static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceExternalMemoryHostPropertiesEXT : public layout::PhysicalDeviceExternalMemoryHostPropertiesEXT
+ struct PhysicalDeviceExternalMemoryHostPropertiesEXT
{
- PhysicalDeviceExternalMemoryHostPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalMemoryHostPropertiesEXT()
+ PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+ : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) - offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalMemoryHostPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceExternalMemoryHostPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>(&rhs);
return *this;
}
@@ -47416,53 +44022,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceExternalMemoryHostPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
};
static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceExternalSemaphoreInfo
{
- struct PhysicalDeviceExternalSemaphoreInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : handleType( handleType_ )
- {}
-
- PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>(this) = rhs;
- }
-
- PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
- const void* pNext = nullptr;
- vk::ExternalSemaphoreHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceExternalSemaphoreInfo : public layout::PhysicalDeviceExternalSemaphoreInfo
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalSemaphoreInfo( handleType_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceExternalSemaphoreInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceExternalSemaphoreInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>(&rhs);
return *this;
}
@@ -47472,7 +44059,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceExternalSemaphoreInfo & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -47492,7 +44079,7 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -47500,53 +44087,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceExternalSemaphoreInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceFeatures2
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( vk::PhysicalDeviceFeatures features_ = vk::PhysicalDeviceFeatures() ) VULKAN_HPP_NOEXCEPT
- : features( features_ )
- {}
-
- PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFeatures2*>(this) = rhs;
- }
-
- PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFeatures2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
- void* pNext = nullptr;
- vk::PhysicalDeviceFeatures features;
- };
- static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceFeatures2 : public layout::PhysicalDeviceFeatures2
+ struct PhysicalDeviceFeatures2
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( vk::PhysicalDeviceFeatures features_ = vk::PhysicalDeviceFeatures() ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFeatures2( features_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT
+ : features( features_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) - offsetof( PhysicalDeviceFeatures2, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFeatures2( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceFeatures2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>(&rhs);
return *this;
}
@@ -47556,7 +44124,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceFeatures2 & setFeatures( vk::PhysicalDeviceFeatures features_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ ) VULKAN_HPP_NOEXCEPT
{
features = features_;
return *this;
@@ -47584,82 +44152,80 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceFeatures2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
};
static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
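PhysicalDeviceFeatures2 heads any feature query chain; extension feature structs hang off its pNext. A sketch reusing the exclusive-scissor wrapper from above (illustrative helper):

  #include <vulkan/vulkan.hpp>

  bool hasExclusiveScissor( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceExclusiveScissorFeaturesNV exclusiveScissorFeatures;
    vk::PhysicalDeviceFeatures2 features2;
    features2.pNext = &exclusiveScissorFeatures;

    physicalDevice.getFeatures2( &features2 );  // fills the core features and the chained struct
    return exclusiveScissorFeatures.exclusiveScissor == VK_TRUE;
  }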
- namespace layout
+ struct PhysicalDeviceFloatControlsProperties
{
- struct PhysicalDeviceFloatControlsPropertiesKHR
- {
- protected:
- PhysicalDeviceFloatControlsPropertiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceFloatControlsPropertiesKHR( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceFloatControlsPropertiesKHR& operator=( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceFloatControlsPropertiesKHR;
- void* pNext = nullptr;
- vk::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence;
- vk::ShaderFloatControlsIndependenceKHR roundingModeIndependence;
- vk::Bool32 shaderSignedZeroInfNanPreserveFloat16;
- vk::Bool32 shaderSignedZeroInfNanPreserveFloat32;
- vk::Bool32 shaderSignedZeroInfNanPreserveFloat64;
- vk::Bool32 shaderDenormPreserveFloat16;
- vk::Bool32 shaderDenormPreserveFloat32;
- vk::Bool32 shaderDenormPreserveFloat64;
- vk::Bool32 shaderDenormFlushToZeroFloat16;
- vk::Bool32 shaderDenormFlushToZeroFloat32;
- vk::Bool32 shaderDenormFlushToZeroFloat64;
- vk::Bool32 shaderRoundingModeRTEFloat16;
- vk::Bool32 shaderRoundingModeRTEFloat32;
- vk::Bool32 shaderRoundingModeRTEFloat64;
- vk::Bool32 shaderRoundingModeRTZFloat16;
- vk::Bool32 shaderRoundingModeRTZFloat32;
- vk::Bool32 shaderRoundingModeRTZFloat64;
- };
- static_assert( sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) == sizeof( VkPhysicalDeviceFloatControlsPropertiesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceFloatControlsPropertiesKHR : public layout::PhysicalDeviceFloatControlsPropertiesKHR
- {
- PhysicalDeviceFloatControlsPropertiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFloatControlsPropertiesKHR()
+ PhysicalDeviceFloatControlsProperties( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT
+ : denormBehaviorIndependence( denormBehaviorIndependence_ )
+ , roundingModeIndependence( roundingModeIndependence_ )
+ , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
+ , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
+ , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
+ , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
+ , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
+ , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
+ , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
+ , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
+ , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
+ , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
+ , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
+ , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
+ , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
+ , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
+ , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
{}
- PhysicalDeviceFloatControlsPropertiesKHR( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFloatControlsPropertiesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) - offsetof( PhysicalDeviceFloatControlsProperties, pNext ) );
+ return *this;
+ }
- PhysicalDeviceFloatControlsPropertiesKHR& operator=( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceFloatControlsPropertiesKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ PhysicalDeviceFloatControlsProperties& operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>(&rhs);
return *this;
}
- operator VkPhysicalDeviceFloatControlsPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFloatControlsProperties const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceFloatControlsPropertiesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties*>( this );
}
- operator VkPhysicalDeviceFloatControlsPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties*>( this );
}
- bool operator==( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -47682,59 +44248,59 @@ namespace VULKAN_HPP_NAMESPACE
&& ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
}
- bool operator!=( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceFloatControlsPropertiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
};
- static_assert( sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) == sizeof( VkPhysicalDeviceFloatControlsPropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceFragmentDensityMapFeaturesEXT
{
- struct PhysicalDeviceFragmentDensityMapFeaturesEXT
- {
- protected:
- PhysicalDeviceFragmentDensityMapFeaturesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 fragmentDensityMap;
- vk::Bool32 fragmentDensityMapDynamic;
- vk::Bool32 fragmentDensityMapNonSubsampledImages;
- };
- static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceFragmentDensityMapFeaturesEXT : public layout::PhysicalDeviceFragmentDensityMapFeaturesEXT
- {
- PhysicalDeviceFragmentDensityMapFeaturesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFragmentDensityMapFeaturesEXT()
+ PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT
+ : fragmentDensityMap( fragmentDensityMap_ )
+ , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ )
+ , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFragmentDensityMapFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceFragmentDensityMapFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>(&rhs);
return *this;
}
@@ -47762,54 +44328,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceFragmentDensityMapFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
};
static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceFragmentDensityMapPropertiesEXT
- {
- protected:
- PhysicalDeviceFragmentDensityMapPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
- void* pNext = nullptr;
- vk::Extent2D minFragmentDensityTexelSize;
- vk::Extent2D maxFragmentDensityTexelSize;
- vk::Bool32 fragmentDensityInvocations;
- };
- static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceFragmentDensityMapPropertiesEXT : public layout::PhysicalDeviceFragmentDensityMapPropertiesEXT
+ struct PhysicalDeviceFragmentDensityMapPropertiesEXT
{
- PhysicalDeviceFragmentDensityMapPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFragmentDensityMapPropertiesEXT()
+ PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT
+ : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ )
+ , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ )
+ , fragmentDensityInvocations( fragmentDensityInvocations_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFragmentDensityMapPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceFragmentDensityMapPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>(&rhs);
return *this;
}
@@ -47837,53 +44389,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceFragmentDensityMapPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
};
static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( vk::Bool32 fragmentShaderBarycentric_ = 0 ) VULKAN_HPP_NOEXCEPT
- : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
- {}
-
- PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 fragmentShaderBarycentric;
- };
- static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV : public layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV
+ struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( vk::Bool32 fragmentShaderBarycentric_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV( fragmentShaderBarycentric_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT
+ : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>(&rhs);
return *this;
}
@@ -47893,7 +44428,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( vk::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderBarycentric = fragmentShaderBarycentric_;
return *this;
@@ -47921,61 +44456,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceFragmentShaderBarycentricFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
};
static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
{
- struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( vk::Bool32 fragmentShaderSampleInterlock_ = 0,
- vk::Bool32 fragmentShaderPixelInterlock_ = 0,
- vk::Bool32 fragmentShaderShadingRateInterlock_ = 0 ) VULKAN_HPP_NOEXCEPT
- : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
- , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
- , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
- {}
-
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 fragmentShaderSampleInterlock;
- vk::Bool32 fragmentShaderPixelInterlock;
- vk::Bool32 fragmentShaderShadingRateInterlock;
- };
- static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT : public layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( vk::Bool32 fragmentShaderSampleInterlock_ = 0,
- vk::Bool32 fragmentShaderPixelInterlock_ = 0,
- vk::Bool32 fragmentShaderShadingRateInterlock_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT( fragmentShaderSampleInterlock_, fragmentShaderPixelInterlock_, fragmentShaderShadingRateInterlock_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT
+ : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
+ , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
+ , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>(&rhs);
return *this;
}
@@ -47985,19 +44497,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( vk::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
return *this;
}
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( vk::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
return *this;
}
- PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( vk::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
{
fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
return *this;
@@ -48027,54 +44539,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceFragmentShaderInterlockFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
};
static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
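// Aside: the assignment operator introduced above deliberately skips the
// immutable sType member and memcpy's everything from pNext to the end of the
// struct. A minimal self-contained sketch of that same pattern, using a
// hypothetical standard-layout struct FooSketch rather than the generated
// Vulkan types (the name and members are illustrative only):

#include <cstddef>  // offsetof
#include <cstring>  // std::memcpy

struct FooSketch
{
  FooSketch & operator=( FooSketch const & rhs ) noexcept
  {
    // sType identifies the struct and never changes; copy from pNext onward.
    // This mirrors the generated code and relies on the remaining members
    // being trivially copyable.
    std::memcpy( &pNext, &rhs.pNext, sizeof( FooSketch ) - offsetof( FooSketch, pNext ) );
    return *this;
  }
  const int sType   = 7;        // stand-in for VULKAN_HPP_NAMESPACE::StructureType
  void *    pNext   = nullptr;
  int       payload = 0;
};

// offsetof is only well-defined here because the type is standard-layout,
// which is exactly what the static_asserts above guarantee for every wrapper.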
- namespace layout
+ struct PhysicalDeviceGroupProperties
{
- struct PhysicalDeviceGroupProperties
+ PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const& physicalDevices_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
+ : physicalDeviceCount( physicalDeviceCount_ )
+ , physicalDevices{}
+ , subsetAllocation( subsetAllocation_ )
{
- protected:
- PhysicalDeviceGroupProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceGroupProperties*>(this) = rhs;
- }
-
- PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceGroupProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
- void* pNext = nullptr;
- uint32_t physicalDeviceCount;
- vk::PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
- vk::Bool32 subsetAllocation;
- };
- static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE,VK_MAX_DEVICE_GROUP_SIZE>::copy( physicalDevices, physicalDevices_ );
+ }
- struct PhysicalDeviceGroupProperties : public layout::PhysicalDeviceGroupProperties
- {
- PhysicalDeviceGroupProperties() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceGroupProperties()
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) );
+ return *this;
+ }
PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceGroupProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceGroupProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>(&rhs);
return *this;
}
@@ -48093,7 +44593,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( physicalDeviceCount == rhs.physicalDeviceCount )
- && ( memcmp( physicalDevices, rhs.physicalDevices, VK_MAX_DEVICE_GROUP_SIZE * sizeof( vk::PhysicalDevice ) ) == 0 )
+ && ( memcmp( physicalDevices, rhs.physicalDevices, std::min<uint32_t>( VK_MAX_DEVICE_GROUP_SIZE, physicalDeviceCount ) * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 )
&& ( subsetAllocation == rhs.subsetAllocation );
}
@@ -48102,140 +44602,113 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceGroupProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
+ void* pNext = {};
+ uint32_t physicalDeviceCount = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
};
static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
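// Aside: note that operator== above now clamps the compared element count to
// std::min( VK_MAX_DEVICE_GROUP_SIZE, physicalDeviceCount ) instead of always
// comparing the whole fixed-size array, so stale data past physicalDeviceCount
// can no longer make two otherwise-equal query results compare unequal. A
// reduced sketch of the idea with hypothetical stand-in types:

#include <algorithm> // std::min
#include <cstdint>
#include <cstring>   // std::memcmp

struct GroupSketch
{
  std::uint32_t physicalDeviceCount = 0;
  std::uint64_t physicalDevices[32] = {}; // stand-in for VK_MAX_DEVICE_GROUP_SIZE handles

  bool operator==( GroupSketch const & rhs ) const noexcept
  {
    // Only the entries actually filled in participate in the comparison,
    // clamped to the array bound for safety.
    return ( physicalDeviceCount == rhs.physicalDeviceCount )
        && ( std::memcmp( physicalDevices, rhs.physicalDevices,
                          std::min<std::uint32_t>( 32, physicalDeviceCount )
                            * sizeof( std::uint64_t ) ) == 0 );
  }
};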
- namespace layout
+ struct PhysicalDeviceHostQueryResetFeatures
{
- struct PhysicalDeviceHostQueryResetFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeaturesEXT( vk::Bool32 hostQueryReset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : hostQueryReset( hostQueryReset_ )
- {}
-
- PhysicalDeviceHostQueryResetFeaturesEXT( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceHostQueryResetFeaturesEXT& operator=( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 hostQueryReset;
- };
- static_assert( sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) == sizeof( VkPhysicalDeviceHostQueryResetFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceHostQueryResetFeaturesEXT : public layout::PhysicalDeviceHostQueryResetFeaturesEXT
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeaturesEXT( vk::Bool32 hostQueryReset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceHostQueryResetFeaturesEXT( hostQueryReset_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : hostQueryReset( hostQueryReset_ )
{}
- PhysicalDeviceHostQueryResetFeaturesEXT( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceHostQueryResetFeaturesEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) - offsetof( PhysicalDeviceHostQueryResetFeatures, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceHostQueryResetFeaturesEXT& operator=( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceHostQueryResetFeatures& operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceHostQueryResetFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>(&rhs);
return *this;
}
- PhysicalDeviceHostQueryResetFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceHostQueryResetFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceHostQueryResetFeaturesEXT & setHostQueryReset( vk::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
{
hostQueryReset = hostQueryReset_;
return *this;
}
- operator VkPhysicalDeviceHostQueryResetFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceHostQueryResetFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeaturesEXT*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures*>( this );
}
- operator VkPhysicalDeviceHostQueryResetFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures*>( this );
}
- bool operator==( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( hostQueryReset == rhs.hostQueryReset );
}
- bool operator!=( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceHostQueryResetFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
};
- static_assert( sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) == sizeof( VkPhysicalDeviceHostQueryResetFeaturesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeatures>::value, "struct wrapper is not a standard layout!" );
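// Aside: the conversion operators above are plain reinterpret_casts, which is
// what lets a wrapper object be handed straight to the C API with no copy.
// A usage sketch, assuming Vulkan 1.2 headers (where the struct lost its EXT
// suffix, as in this diff):

#include <vulkan/vulkan.hpp>

void passToCApi()
{
  vk::PhysicalDeviceHostQueryResetFeatures features;
  // Same object viewed as the C struct; valid because wrapper and C struct
  // are asserted above to have identical size and standard layout.
  VkPhysicalDeviceHostQueryResetFeatures const & cView = features;
  (void)cView;
}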
- namespace layout
+ struct PhysicalDeviceIDProperties
{
- struct PhysicalDeviceIDProperties
+ PhysicalDeviceIDProperties( std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {},
+ std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {},
+ std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {},
+ uint32_t deviceNodeMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT
+ : deviceUUID{}
+ , driverUUID{}
+ , deviceLUID{}
+ , deviceNodeMask( deviceNodeMask_ )
+ , deviceLUIDValid( deviceLUIDValid_ )
{
- protected:
- PhysicalDeviceIDProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceIDProperties*>(this) = rhs;
- }
-
- PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceIDProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
- void* pNext = nullptr;
- uint8_t deviceUUID[VK_UUID_SIZE];
- uint8_t driverUUID[VK_UUID_SIZE];
- uint8_t deviceLUID[VK_LUID_SIZE];
- uint32_t deviceNodeMask;
- vk::Bool32 deviceLUIDValid;
- };
- static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( deviceUUID, deviceUUID_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( driverUUID, driverUUID_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_LUID_SIZE,VK_LUID_SIZE>::copy( deviceLUID, deviceLUID_ );
+ }
- struct PhysicalDeviceIDProperties : public layout::PhysicalDeviceIDProperties
- {
- PhysicalDeviceIDProperties() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceIDProperties()
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) );
+ return *this;
+ }
PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceIDProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceIDProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>(&rhs);
return *this;
}
@@ -48265,65 +44738,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceIDProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
+ void* pNext = {};
+ uint8_t deviceUUID[VK_UUID_SIZE] = {};
+ uint8_t driverUUID[VK_UUID_SIZE] = {};
+ uint8_t deviceLUID[VK_LUID_SIZE] = {};
+ uint32_t deviceNodeMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
};
static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
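// Aside: the ConstExpression1DArrayCopy helper used in the constructor above
// copies a std::array argument into the C-array member element by element.
// Judging by the name, the loop (rather than memcpy) keeps the copy usable in
// constant expressions. The generated helper carries separate destination and
// source extents (<T, N, M>); this hypothetical equivalent collapses them to
// one N for brevity:

#include <array>
#include <cstddef>

template <typename T, std::size_t N>
struct ArrayCopySketch
{
  // C++14 relaxed constexpr allows the loop; memcpy would not be allowed here.
  static constexpr void copy( T ( &dst )[N], std::array<T, N> const & src ) noexcept
  {
    for ( std::size_t i = 0; i < N; ++i )
    {
      dst[i] = src[i];
    }
  }
};

// e.g. ArrayCopySketch<uint8_t, VK_UUID_SIZE>::copy( deviceUUID, deviceUUID_ );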
- namespace layout
+ struct PhysicalDeviceImageDrmFormatModifierInfoEXT
{
- struct PhysicalDeviceImageDrmFormatModifierInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = 0,
- vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = 0,
- const uint32_t* pQueueFamilyIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : drmFormatModifier( drmFormatModifier_ )
- , sharingMode( sharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
- {}
-
- PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>(this) = rhs;
- }
-
- PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
- const void* pNext = nullptr;
- uint64_t drmFormatModifier;
- vk::SharingMode sharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- };
- static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceImageDrmFormatModifierInfoEXT : public layout::PhysicalDeviceImageDrmFormatModifierInfoEXT
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = 0,
- vk::SharingMode sharingMode_ = vk::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = 0,
- const uint32_t* pQueueFamilyIndices_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceImageDrmFormatModifierInfoEXT( drmFormatModifier_, sharingMode_, queueFamilyIndexCount_, pQueueFamilyIndices_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {},
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+ uint32_t queueFamilyIndexCount_ = {},
+ const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+ : drmFormatModifier( drmFormatModifier_ )
+ , sharingMode( sharingMode_ )
+ , queueFamilyIndexCount( queueFamilyIndexCount_ )
+ , pQueueFamilyIndices( pQueueFamilyIndices_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) - offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceImageDrmFormatModifierInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceImageDrmFormatModifierInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>(&rhs);
return *this;
}
@@ -48339,7 +44791,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( vk::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
{
sharingMode = sharingMode_;
return *this;
@@ -48382,69 +44834,45 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceImageDrmFormatModifierInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+ const void* pNext = {};
+ uint64_t drmFormatModifier = {};
+ VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+ uint32_t queueFamilyIndexCount = {};
+ const uint32_t* pQueueFamilyIndices = {};
};
static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
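// Aside: every set* member above returns *this, so the info structs compose
// fluently. A usage sketch assuming Vulkan headers are available; the queue
// family indices are made up for illustration, not queried from a device:

#include <vulkan/vulkan.hpp>

int main()
{
  uint32_t queueFamilies[] = { 0u, 1u }; // illustrative indices
  auto info = vk::PhysicalDeviceImageDrmFormatModifierInfoEXT{}
                  .setDrmFormatModifier( 0 )
                  .setSharingMode( vk::SharingMode::eConcurrent )
                  .setQueueFamilyIndexCount( 2 )
                  .setPQueueFamilyIndices( queueFamilies );
  (void)info;
}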
- namespace layout
- {
- struct PhysicalDeviceImageFormatInfo2
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( vk::Format format_ = vk::Format::eUndefined,
- vk::ImageType type_ = vk::ImageType::e1D,
- vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal,
- vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(),
- vk::ImageCreateFlags flags_ = vk::ImageCreateFlags() ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- , type( type_ )
- , tiling( tiling_ )
- , usage( usage_ )
- , flags( flags_ )
- {}
-
- PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>(this) = rhs;
- }
-
- PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
- const void* pNext = nullptr;
- vk::Format format;
- vk::ImageType type;
- vk::ImageTiling tiling;
- vk::ImageUsageFlags usage;
- vk::ImageCreateFlags flags;
- };
- static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceImageFormatInfo2 : public layout::PhysicalDeviceImageFormatInfo2
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( vk::Format format_ = vk::Format::eUndefined,
- vk::ImageType type_ = vk::ImageType::e1D,
- vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal,
- vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(),
- vk::ImageCreateFlags flags_ = vk::ImageCreateFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceImageFormatInfo2( format_, type_, tiling_, usage_, flags_ )
+ struct PhysicalDeviceImageFormatInfo2
+ {
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : format( format_ )
+ , type( type_ )
+ , tiling( tiling_ )
+ , usage( usage_ )
+ , flags( flags_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceImageFormatInfo2( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceImageFormatInfo2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>(&rhs);
return *this;
}
@@ -48454,31 +44882,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setType( vk::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setTiling( vk::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setUsage( vk::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- PhysicalDeviceImageFormatInfo2 & setFlags( vk::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -48510,53 +44938,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceImageFormatInfo2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
};
static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceImageViewImageFormatInfoEXT
{
- struct PhysicalDeviceImageViewImageFormatInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( vk::ImageViewType imageViewType_ = vk::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
- : imageViewType( imageViewType_ )
- {}
-
- PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>(this) = rhs;
- }
-
- PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
- void* pNext = nullptr;
- vk::ImageViewType imageViewType;
- };
- static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceImageViewImageFormatInfoEXT : public layout::PhysicalDeviceImageViewImageFormatInfoEXT
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( vk::ImageViewType imageViewType_ = vk::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceImageViewImageFormatInfoEXT( imageViewType_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
+ : imageViewType( imageViewType_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) - offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceImageViewImageFormatInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceImageViewImageFormatInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>(&rhs);
return *this;
}
@@ -48566,7 +44979,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( vk::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
{
imageViewType = imageViewType_;
return *this;
@@ -48594,137 +45007,99 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceImageViewImageFormatInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
};
static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceImagelessFramebufferFeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeaturesKHR( vk::Bool32 imagelessFramebuffer_ = 0 ) VULKAN_HPP_NOEXCEPT
- : imagelessFramebuffer( imagelessFramebuffer_ )
- {}
-
- PhysicalDeviceImagelessFramebufferFeaturesKHR( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceImagelessFramebufferFeaturesKHR& operator=( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 imagelessFramebuffer;
- };
- static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceImagelessFramebufferFeaturesKHR : public layout::PhysicalDeviceImagelessFramebufferFeaturesKHR
+ struct PhysicalDeviceImagelessFramebufferFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeaturesKHR( vk::Bool32 imagelessFramebuffer_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceImagelessFramebufferFeaturesKHR( imagelessFramebuffer_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : imagelessFramebuffer( imagelessFramebuffer_ )
{}
- PhysicalDeviceImagelessFramebufferFeaturesKHR( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceImagelessFramebufferFeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) - offsetof( PhysicalDeviceImagelessFramebufferFeatures, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceImagelessFramebufferFeaturesKHR& operator=( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImagelessFramebufferFeatures& operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceImagelessFramebufferFeaturesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>(&rhs);
return *this;
}
- PhysicalDeviceImagelessFramebufferFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImagelessFramebufferFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceImagelessFramebufferFeaturesKHR & setImagelessFramebuffer( vk::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
{
imagelessFramebuffer = imagelessFramebuffer_;
return *this;
}
- operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImagelessFramebufferFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
}
- operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
}
- bool operator==( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( imagelessFramebuffer == rhs.imagelessFramebuffer );
}
- bool operator!=( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceImagelessFramebufferFeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
};
- static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceIndexTypeUint8FeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( vk::Bool32 indexTypeUint8_ = 0 ) VULKAN_HPP_NOEXCEPT
- : indexTypeUint8( indexTypeUint8_ )
- {}
+ static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeatures>::value, "struct wrapper is not a standard layout!" );
- PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 indexTypeUint8;
- };
- static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceIndexTypeUint8FeaturesEXT : public layout::PhysicalDeviceIndexTypeUint8FeaturesEXT
+ struct PhysicalDeviceIndexTypeUint8FeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( vk::Bool32 indexTypeUint8_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceIndexTypeUint8FeaturesEXT( indexTypeUint8_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT
+ : indexTypeUint8( indexTypeUint8_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) - offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceIndexTypeUint8FeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceIndexTypeUint8FeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>(&rhs);
return *this;
}
@@ -48734,7 +45109,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( vk::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
{
indexTypeUint8 = indexTypeUint8_;
return *this;
@@ -48762,57 +45137,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceIndexTypeUint8FeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
};
static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceInlineUniformBlockFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( vk::Bool32 inlineUniformBlock_ = 0,
- vk::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = 0 ) VULKAN_HPP_NOEXCEPT
- : inlineUniformBlock( inlineUniformBlock_ )
- , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
- {}
-
- PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 inlineUniformBlock;
- vk::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind;
- };
- static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceInlineUniformBlockFeaturesEXT : public layout::PhysicalDeviceInlineUniformBlockFeaturesEXT
+ struct PhysicalDeviceInlineUniformBlockFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( vk::Bool32 inlineUniformBlock_ = 0,
- vk::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceInlineUniformBlockFeaturesEXT( inlineUniformBlock_, descriptorBindingInlineUniformBlockUpdateAfterBind_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
+ : inlineUniformBlock( inlineUniformBlock_ )
+ , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceInlineUniformBlockFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceInlineUniformBlockFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>(&rhs);
return *this;
}
@@ -48822,13 +45176,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( vk::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
{
inlineUniformBlock = inlineUniformBlock_;
return *this;
}
- PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( vk::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
return *this;
@@ -48857,56 +45211,43 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceInlineUniformBlockFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
};
static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceInlineUniformBlockPropertiesEXT
{
- struct PhysicalDeviceInlineUniformBlockPropertiesEXT
- {
- protected:
- PhysicalDeviceInlineUniformBlockPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
- void* pNext = nullptr;
- uint32_t maxInlineUniformBlockSize;
- uint32_t maxPerStageDescriptorInlineUniformBlocks;
- uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
- uint32_t maxDescriptorSetInlineUniformBlocks;
- uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
- };
- static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceInlineUniformBlockPropertiesEXT : public layout::PhysicalDeviceInlineUniformBlockPropertiesEXT
- {
- PhysicalDeviceInlineUniformBlockPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceInlineUniformBlockPropertiesEXT()
+ PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = {},
+ uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
+ uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
+ , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
+ , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
+ , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
+ , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceInlineUniformBlockPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceInlineUniformBlockPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>(&rhs);
return *this;
}
@@ -48936,25 +45277,249 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceInlineUniformBlockPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
+ void* pNext = {};
+ uint32_t maxInlineUniformBlockSize = {};
+ uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
+ uint32_t maxDescriptorSetInlineUniformBlocks = {};
+ uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
};
static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
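// Aside: properties structs like the one above are filled by the
// implementation rather than the application; the usual pattern is to chain
// the extension struct into PhysicalDeviceProperties2 via pNext and query
// once. A sketch assuming a valid vk::PhysicalDevice and a driver exposing
// VK_EXT_inline_uniform_block:

#include <vulkan/vulkan.hpp>

void queryInlineUniformBlockLimits( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceInlineUniformBlockPropertiesEXT inlineProps;
  vk::PhysicalDeviceProperties2 props2;
  props2.pNext = &inlineProps;              // chain the extension struct
  physicalDevice.getProperties2( &props2 ); // implementation fills both
  // inlineProps.maxInlineUniformBlockSize etc. now hold the device limits.
}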
struct PhysicalDeviceLimits
{
- PhysicalDeviceLimits() VULKAN_HPP_NOEXCEPT
- {}
+ PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {},
+ uint32_t maxImageDimension2D_ = {},
+ uint32_t maxImageDimension3D_ = {},
+ uint32_t maxImageDimensionCube_ = {},
+ uint32_t maxImageArrayLayers_ = {},
+ uint32_t maxTexelBufferElements_ = {},
+ uint32_t maxUniformBufferRange_ = {},
+ uint32_t maxStorageBufferRange_ = {},
+ uint32_t maxPushConstantsSize_ = {},
+ uint32_t maxMemoryAllocationCount_ = {},
+ uint32_t maxSamplerAllocationCount_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {},
+ uint32_t maxBoundDescriptorSets_ = {},
+ uint32_t maxPerStageDescriptorSamplers_ = {},
+ uint32_t maxPerStageDescriptorUniformBuffers_ = {},
+ uint32_t maxPerStageDescriptorStorageBuffers_ = {},
+ uint32_t maxPerStageDescriptorSampledImages_ = {},
+ uint32_t maxPerStageDescriptorStorageImages_ = {},
+ uint32_t maxPerStageDescriptorInputAttachments_ = {},
+ uint32_t maxPerStageResources_ = {},
+ uint32_t maxDescriptorSetSamplers_ = {},
+ uint32_t maxDescriptorSetUniformBuffers_ = {},
+ uint32_t maxDescriptorSetUniformBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetStorageBuffers_ = {},
+ uint32_t maxDescriptorSetStorageBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetSampledImages_ = {},
+ uint32_t maxDescriptorSetStorageImages_ = {},
+ uint32_t maxDescriptorSetInputAttachments_ = {},
+ uint32_t maxVertexInputAttributes_ = {},
+ uint32_t maxVertexInputBindings_ = {},
+ uint32_t maxVertexInputAttributeOffset_ = {},
+ uint32_t maxVertexInputBindingStride_ = {},
+ uint32_t maxVertexOutputComponents_ = {},
+ uint32_t maxTessellationGenerationLevel_ = {},
+ uint32_t maxTessellationPatchSize_ = {},
+ uint32_t maxTessellationControlPerVertexInputComponents_ = {},
+ uint32_t maxTessellationControlPerVertexOutputComponents_ = {},
+ uint32_t maxTessellationControlPerPatchOutputComponents_ = {},
+ uint32_t maxTessellationControlTotalOutputComponents_ = {},
+ uint32_t maxTessellationEvaluationInputComponents_ = {},
+ uint32_t maxTessellationEvaluationOutputComponents_ = {},
+ uint32_t maxGeometryShaderInvocations_ = {},
+ uint32_t maxGeometryInputComponents_ = {},
+ uint32_t maxGeometryOutputComponents_ = {},
+ uint32_t maxGeometryOutputVertices_ = {},
+ uint32_t maxGeometryTotalOutputComponents_ = {},
+ uint32_t maxFragmentInputComponents_ = {},
+ uint32_t maxFragmentOutputAttachments_ = {},
+ uint32_t maxFragmentDualSrcAttachments_ = {},
+ uint32_t maxFragmentCombinedOutputResources_ = {},
+ uint32_t maxComputeSharedMemorySize_ = {},
+ std::array<uint32_t,3> const& maxComputeWorkGroupCount_ = {},
+ uint32_t maxComputeWorkGroupInvocations_ = {},
+ std::array<uint32_t,3> const& maxComputeWorkGroupSize_ = {},
+ uint32_t subPixelPrecisionBits_ = {},
+ uint32_t subTexelPrecisionBits_ = {},
+ uint32_t mipmapPrecisionBits_ = {},
+ uint32_t maxDrawIndexedIndexValue_ = {},
+ uint32_t maxDrawIndirectCount_ = {},
+ float maxSamplerLodBias_ = {},
+ float maxSamplerAnisotropy_ = {},
+ uint32_t maxViewports_ = {},
+ std::array<uint32_t,2> const& maxViewportDimensions_ = {},
+ std::array<float,2> const& viewportBoundsRange_ = {},
+ uint32_t viewportSubPixelBits_ = {},
+ size_t minMemoryMapAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {},
+ int32_t minTexelOffset_ = {},
+ uint32_t maxTexelOffset_ = {},
+ int32_t minTexelGatherOffset_ = {},
+ uint32_t maxTexelGatherOffset_ = {},
+ float minInterpolationOffset_ = {},
+ float maxInterpolationOffset_ = {},
+ uint32_t subPixelInterpolationOffsetBits_ = {},
+ uint32_t maxFramebufferWidth_ = {},
+ uint32_t maxFramebufferHeight_ = {},
+ uint32_t maxFramebufferLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {},
+ uint32_t maxColorAttachments_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {},
+ uint32_t maxSampleMaskWords_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {},
+ float timestampPeriod_ = {},
+ uint32_t maxClipDistances_ = {},
+ uint32_t maxCullDistances_ = {},
+ uint32_t maxCombinedClipAndCullDistances_ = {},
+ uint32_t discreteQueuePriorities_ = {},
+ std::array<float,2> const& pointSizeRange_ = {},
+ std::array<float,2> const& lineWidthRange_ = {},
+ float pointSizeGranularity_ = {},
+ float lineWidthGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxImageDimension1D( maxImageDimension1D_ )
+ , maxImageDimension2D( maxImageDimension2D_ )
+ , maxImageDimension3D( maxImageDimension3D_ )
+ , maxImageDimensionCube( maxImageDimensionCube_ )
+ , maxImageArrayLayers( maxImageArrayLayers_ )
+ , maxTexelBufferElements( maxTexelBufferElements_ )
+ , maxUniformBufferRange( maxUniformBufferRange_ )
+ , maxStorageBufferRange( maxStorageBufferRange_ )
+ , maxPushConstantsSize( maxPushConstantsSize_ )
+ , maxMemoryAllocationCount( maxMemoryAllocationCount_ )
+ , maxSamplerAllocationCount( maxSamplerAllocationCount_ )
+ , bufferImageGranularity( bufferImageGranularity_ )
+ , sparseAddressSpaceSize( sparseAddressSpaceSize_ )
+ , maxBoundDescriptorSets( maxBoundDescriptorSets_ )
+ , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ )
+ , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ )
+ , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ )
+ , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ )
+ , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ )
+ , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ )
+ , maxPerStageResources( maxPerStageResources_ )
+ , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ )
+ , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ )
+ , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ )
+ , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ )
+ , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ )
+ , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ )
+ , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ )
+ , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ )
+ , maxVertexInputAttributes( maxVertexInputAttributes_ )
+ , maxVertexInputBindings( maxVertexInputBindings_ )
+ , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ )
+ , maxVertexInputBindingStride( maxVertexInputBindingStride_ )
+ , maxVertexOutputComponents( maxVertexOutputComponents_ )
+ , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ )
+ , maxTessellationPatchSize( maxTessellationPatchSize_ )
+ , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ )
+ , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ )
+ , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ )
+ , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ )
+ , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ )
+ , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ )
+ , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ )
+ , maxGeometryInputComponents( maxGeometryInputComponents_ )
+ , maxGeometryOutputComponents( maxGeometryOutputComponents_ )
+ , maxGeometryOutputVertices( maxGeometryOutputVertices_ )
+ , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ )
+ , maxFragmentInputComponents( maxFragmentInputComponents_ )
+ , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ )
+ , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ )
+ , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ )
+ , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ )
+ , maxComputeWorkGroupCount{}
+ , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ )
+ , maxComputeWorkGroupSize{}
+ , subPixelPrecisionBits( subPixelPrecisionBits_ )
+ , subTexelPrecisionBits( subTexelPrecisionBits_ )
+ , mipmapPrecisionBits( mipmapPrecisionBits_ )
+ , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ )
+ , maxDrawIndirectCount( maxDrawIndirectCount_ )
+ , maxSamplerLodBias( maxSamplerLodBias_ )
+ , maxSamplerAnisotropy( maxSamplerAnisotropy_ )
+ , maxViewports( maxViewports_ )
+ , maxViewportDimensions{}
+ , viewportBoundsRange{}
+ , viewportSubPixelBits( viewportSubPixelBits_ )
+ , minMemoryMapAlignment( minMemoryMapAlignment_ )
+ , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ )
+ , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ )
+ , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ )
+ , minTexelOffset( minTexelOffset_ )
+ , maxTexelOffset( maxTexelOffset_ )
+ , minTexelGatherOffset( minTexelGatherOffset_ )
+ , maxTexelGatherOffset( maxTexelGatherOffset_ )
+ , minInterpolationOffset( minInterpolationOffset_ )
+ , maxInterpolationOffset( maxInterpolationOffset_ )
+ , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ )
+ , maxFramebufferWidth( maxFramebufferWidth_ )
+ , maxFramebufferHeight( maxFramebufferHeight_ )
+ , maxFramebufferLayers( maxFramebufferLayers_ )
+ , framebufferColorSampleCounts( framebufferColorSampleCounts_ )
+ , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ )
+ , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ )
+ , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ )
+ , maxColorAttachments( maxColorAttachments_ )
+ , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ )
+ , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ )
+ , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ )
+ , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ )
+ , storageImageSampleCounts( storageImageSampleCounts_ )
+ , maxSampleMaskWords( maxSampleMaskWords_ )
+ , timestampComputeAndGraphics( timestampComputeAndGraphics_ )
+ , timestampPeriod( timestampPeriod_ )
+ , maxClipDistances( maxClipDistances_ )
+ , maxCullDistances( maxCullDistances_ )
+ , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ )
+ , discreteQueuePriorities( discreteQueuePriorities_ )
+ , pointSizeRange{}
+ , lineWidthRange{}
+ , pointSizeGranularity( pointSizeGranularity_ )
+ , lineWidthGranularity( lineWidthGranularity_ )
+ , strictLines( strictLines_ )
+ , standardSampleLocations( standardSampleLocations_ )
+ , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ )
+ , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ )
+ , nonCoherentAtomSize( nonCoherentAtomSize_ )
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupCount, maxComputeWorkGroupCount_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupSize, maxComputeWorkGroupSize_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,2,2>::copy( maxViewportDimensions, maxViewportDimensions_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( viewportBoundsRange, viewportBoundsRange_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( pointSizeRange, pointSizeRange_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( lineWidthRange, lineWidthRange_ );
+ }
PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceLimits*>(this) = rhs;
+ *this = rhs;
}
PhysicalDeviceLimits& operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceLimits*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>(&rhs);
return *this;
}
@@ -49084,177 +45649,146 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t maxImageDimension1D;
- uint32_t maxImageDimension2D;
- uint32_t maxImageDimension3D;
- uint32_t maxImageDimensionCube;
- uint32_t maxImageArrayLayers;
- uint32_t maxTexelBufferElements;
- uint32_t maxUniformBufferRange;
- uint32_t maxStorageBufferRange;
- uint32_t maxPushConstantsSize;
- uint32_t maxMemoryAllocationCount;
- uint32_t maxSamplerAllocationCount;
- vk::DeviceSize bufferImageGranularity;
- vk::DeviceSize sparseAddressSpaceSize;
- uint32_t maxBoundDescriptorSets;
- uint32_t maxPerStageDescriptorSamplers;
- uint32_t maxPerStageDescriptorUniformBuffers;
- uint32_t maxPerStageDescriptorStorageBuffers;
- uint32_t maxPerStageDescriptorSampledImages;
- uint32_t maxPerStageDescriptorStorageImages;
- uint32_t maxPerStageDescriptorInputAttachments;
- uint32_t maxPerStageResources;
- uint32_t maxDescriptorSetSamplers;
- uint32_t maxDescriptorSetUniformBuffers;
- uint32_t maxDescriptorSetUniformBuffersDynamic;
- uint32_t maxDescriptorSetStorageBuffers;
- uint32_t maxDescriptorSetStorageBuffersDynamic;
- uint32_t maxDescriptorSetSampledImages;
- uint32_t maxDescriptorSetStorageImages;
- uint32_t maxDescriptorSetInputAttachments;
- uint32_t maxVertexInputAttributes;
- uint32_t maxVertexInputBindings;
- uint32_t maxVertexInputAttributeOffset;
- uint32_t maxVertexInputBindingStride;
- uint32_t maxVertexOutputComponents;
- uint32_t maxTessellationGenerationLevel;
- uint32_t maxTessellationPatchSize;
- uint32_t maxTessellationControlPerVertexInputComponents;
- uint32_t maxTessellationControlPerVertexOutputComponents;
- uint32_t maxTessellationControlPerPatchOutputComponents;
- uint32_t maxTessellationControlTotalOutputComponents;
- uint32_t maxTessellationEvaluationInputComponents;
- uint32_t maxTessellationEvaluationOutputComponents;
- uint32_t maxGeometryShaderInvocations;
- uint32_t maxGeometryInputComponents;
- uint32_t maxGeometryOutputComponents;
- uint32_t maxGeometryOutputVertices;
- uint32_t maxGeometryTotalOutputComponents;
- uint32_t maxFragmentInputComponents;
- uint32_t maxFragmentOutputAttachments;
- uint32_t maxFragmentDualSrcAttachments;
- uint32_t maxFragmentCombinedOutputResources;
- uint32_t maxComputeSharedMemorySize;
- uint32_t maxComputeWorkGroupCount[3];
- uint32_t maxComputeWorkGroupInvocations;
- uint32_t maxComputeWorkGroupSize[3];
- uint32_t subPixelPrecisionBits;
- uint32_t subTexelPrecisionBits;
- uint32_t mipmapPrecisionBits;
- uint32_t maxDrawIndexedIndexValue;
- uint32_t maxDrawIndirectCount;
- float maxSamplerLodBias;
- float maxSamplerAnisotropy;
- uint32_t maxViewports;
- uint32_t maxViewportDimensions[2];
- float viewportBoundsRange[2];
- uint32_t viewportSubPixelBits;
- size_t minMemoryMapAlignment;
- vk::DeviceSize minTexelBufferOffsetAlignment;
- vk::DeviceSize minUniformBufferOffsetAlignment;
- vk::DeviceSize minStorageBufferOffsetAlignment;
- int32_t minTexelOffset;
- uint32_t maxTexelOffset;
- int32_t minTexelGatherOffset;
- uint32_t maxTexelGatherOffset;
- float minInterpolationOffset;
- float maxInterpolationOffset;
- uint32_t subPixelInterpolationOffsetBits;
- uint32_t maxFramebufferWidth;
- uint32_t maxFramebufferHeight;
- uint32_t maxFramebufferLayers;
- vk::SampleCountFlags framebufferColorSampleCounts;
- vk::SampleCountFlags framebufferDepthSampleCounts;
- vk::SampleCountFlags framebufferStencilSampleCounts;
- vk::SampleCountFlags framebufferNoAttachmentsSampleCounts;
- uint32_t maxColorAttachments;
- vk::SampleCountFlags sampledImageColorSampleCounts;
- vk::SampleCountFlags sampledImageIntegerSampleCounts;
- vk::SampleCountFlags sampledImageDepthSampleCounts;
- vk::SampleCountFlags sampledImageStencilSampleCounts;
- vk::SampleCountFlags storageImageSampleCounts;
- uint32_t maxSampleMaskWords;
- vk::Bool32 timestampComputeAndGraphics;
- float timestampPeriod;
- uint32_t maxClipDistances;
- uint32_t maxCullDistances;
- uint32_t maxCombinedClipAndCullDistances;
- uint32_t discreteQueuePriorities;
- float pointSizeRange[2];
- float lineWidthRange[2];
- float pointSizeGranularity;
- float lineWidthGranularity;
- vk::Bool32 strictLines;
- vk::Bool32 standardSampleLocations;
- vk::DeviceSize optimalBufferCopyOffsetAlignment;
- vk::DeviceSize optimalBufferCopyRowPitchAlignment;
- vk::DeviceSize nonCoherentAtomSize;
+ uint32_t maxImageDimension1D = {};
+ uint32_t maxImageDimension2D = {};
+ uint32_t maxImageDimension3D = {};
+ uint32_t maxImageDimensionCube = {};
+ uint32_t maxImageArrayLayers = {};
+ uint32_t maxTexelBufferElements = {};
+ uint32_t maxUniformBufferRange = {};
+ uint32_t maxStorageBufferRange = {};
+ uint32_t maxPushConstantsSize = {};
+ uint32_t maxMemoryAllocationCount = {};
+ uint32_t maxSamplerAllocationCount = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
+ uint32_t maxBoundDescriptorSets = {};
+ uint32_t maxPerStageDescriptorSamplers = {};
+ uint32_t maxPerStageDescriptorUniformBuffers = {};
+ uint32_t maxPerStageDescriptorStorageBuffers = {};
+ uint32_t maxPerStageDescriptorSampledImages = {};
+ uint32_t maxPerStageDescriptorStorageImages = {};
+ uint32_t maxPerStageDescriptorInputAttachments = {};
+ uint32_t maxPerStageResources = {};
+ uint32_t maxDescriptorSetSamplers = {};
+ uint32_t maxDescriptorSetUniformBuffers = {};
+ uint32_t maxDescriptorSetUniformBuffersDynamic = {};
+ uint32_t maxDescriptorSetStorageBuffers = {};
+ uint32_t maxDescriptorSetStorageBuffersDynamic = {};
+ uint32_t maxDescriptorSetSampledImages = {};
+ uint32_t maxDescriptorSetStorageImages = {};
+ uint32_t maxDescriptorSetInputAttachments = {};
+ uint32_t maxVertexInputAttributes = {};
+ uint32_t maxVertexInputBindings = {};
+ uint32_t maxVertexInputAttributeOffset = {};
+ uint32_t maxVertexInputBindingStride = {};
+ uint32_t maxVertexOutputComponents = {};
+ uint32_t maxTessellationGenerationLevel = {};
+ uint32_t maxTessellationPatchSize = {};
+ uint32_t maxTessellationControlPerVertexInputComponents = {};
+ uint32_t maxTessellationControlPerVertexOutputComponents = {};
+ uint32_t maxTessellationControlPerPatchOutputComponents = {};
+ uint32_t maxTessellationControlTotalOutputComponents = {};
+ uint32_t maxTessellationEvaluationInputComponents = {};
+ uint32_t maxTessellationEvaluationOutputComponents = {};
+ uint32_t maxGeometryShaderInvocations = {};
+ uint32_t maxGeometryInputComponents = {};
+ uint32_t maxGeometryOutputComponents = {};
+ uint32_t maxGeometryOutputVertices = {};
+ uint32_t maxGeometryTotalOutputComponents = {};
+ uint32_t maxFragmentInputComponents = {};
+ uint32_t maxFragmentOutputAttachments = {};
+ uint32_t maxFragmentDualSrcAttachments = {};
+ uint32_t maxFragmentCombinedOutputResources = {};
+ uint32_t maxComputeSharedMemorySize = {};
+ uint32_t maxComputeWorkGroupCount[3] = {};
+ uint32_t maxComputeWorkGroupInvocations = {};
+ uint32_t maxComputeWorkGroupSize[3] = {};
+ uint32_t subPixelPrecisionBits = {};
+ uint32_t subTexelPrecisionBits = {};
+ uint32_t mipmapPrecisionBits = {};
+ uint32_t maxDrawIndexedIndexValue = {};
+ uint32_t maxDrawIndirectCount = {};
+ float maxSamplerLodBias = {};
+ float maxSamplerAnisotropy = {};
+ uint32_t maxViewports = {};
+ uint32_t maxViewportDimensions[2] = {};
+ float viewportBoundsRange[2] = {};
+ uint32_t viewportSubPixelBits = {};
+ size_t minMemoryMapAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
+ int32_t minTexelOffset = {};
+ uint32_t maxTexelOffset = {};
+ int32_t minTexelGatherOffset = {};
+ uint32_t maxTexelGatherOffset = {};
+ float minInterpolationOffset = {};
+ float maxInterpolationOffset = {};
+ uint32_t subPixelInterpolationOffsetBits = {};
+ uint32_t maxFramebufferWidth = {};
+ uint32_t maxFramebufferHeight = {};
+ uint32_t maxFramebufferLayers = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
+ uint32_t maxColorAttachments = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
+ uint32_t maxSampleMaskWords = {};
+ VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
+ float timestampPeriod = {};
+ uint32_t maxClipDistances = {};
+ uint32_t maxCullDistances = {};
+ uint32_t maxCombinedClipAndCullDistances = {};
+ uint32_t discreteQueuePriorities = {};
+ float pointSizeRange[2] = {};
+ float lineWidthRange[2] = {};
+ float pointSizeGranularity = {};
+ float lineWidthGranularity = {};
+ VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
};
static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
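[note] A PhysicalDeviceLimits is filled in by the driver rather than constructed by hand; a minimal usage sketch, assuming a valid vk::PhysicalDevice (all other names here are illustrative, not part of the diff):

    #include <vulkan/vulkan.hpp>

    // Round `size` up to the device's minimum UBO offset alignment.
    // minUniformBufferOffsetAlignment is a power of two per the Vulkan spec,
    // so the mask trick below is valid.
    vk::DeviceSize alignUniformOffset( vk::PhysicalDevice physicalDevice, vk::DeviceSize size )
    {
        // Array members such as maxComputeWorkGroupSize are plain C arrays in the wrapper.
        vk::PhysicalDeviceLimits const & limits = physicalDevice.getProperties().limits;
        vk::DeviceSize align = limits.minUniformBufferOffsetAlignment;
        return ( size + align - 1 ) & ~( align - 1 );
    }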
- namespace layout
+ struct PhysicalDeviceLineRasterizationFeaturesEXT
{
- struct PhysicalDeviceLineRasterizationFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( vk::Bool32 rectangularLines_ = 0,
- vk::Bool32 bresenhamLines_ = 0,
- vk::Bool32 smoothLines_ = 0,
- vk::Bool32 stippledRectangularLines_ = 0,
- vk::Bool32 stippledBresenhamLines_ = 0,
- vk::Bool32 stippledSmoothLines_ = 0 ) VULKAN_HPP_NOEXCEPT
- : rectangularLines( rectangularLines_ )
- , bresenhamLines( bresenhamLines_ )
- , smoothLines( smoothLines_ )
- , stippledRectangularLines( stippledRectangularLines_ )
- , stippledBresenhamLines( stippledBresenhamLines_ )
- , stippledSmoothLines( stippledSmoothLines_ )
- {}
-
- PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 rectangularLines;
- vk::Bool32 bresenhamLines;
- vk::Bool32 smoothLines;
- vk::Bool32 stippledRectangularLines;
- vk::Bool32 stippledBresenhamLines;
- vk::Bool32 stippledSmoothLines;
- };
- static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceLineRasterizationFeaturesEXT : public layout::PhysicalDeviceLineRasterizationFeaturesEXT
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( vk::Bool32 rectangularLines_ = 0,
- vk::Bool32 bresenhamLines_ = 0,
- vk::Bool32 smoothLines_ = 0,
- vk::Bool32 stippledRectangularLines_ = 0,
- vk::Bool32 stippledBresenhamLines_ = 0,
- vk::Bool32 stippledSmoothLines_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceLineRasterizationFeaturesEXT( rectangularLines_, bresenhamLines_, smoothLines_, stippledRectangularLines_, stippledBresenhamLines_, stippledSmoothLines_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT
+ : rectangularLines( rectangularLines_ )
+ , bresenhamLines( bresenhamLines_ )
+ , smoothLines( smoothLines_ )
+ , stippledRectangularLines( stippledRectangularLines_ )
+ , stippledBresenhamLines( stippledBresenhamLines_ )
+ , stippledSmoothLines( stippledSmoothLines_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) - offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceLineRasterizationFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceLineRasterizationFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>(&rhs);
return *this;
}
@@ -49264,37 +45798,37 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( vk::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
{
rectangularLines = rectangularLines_;
return *this;
}
- PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( vk::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
{
bresenhamLines = bresenhamLines_;
return *this;
}
- PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( vk::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
{
smoothLines = smoothLines_;
return *this;
}
- PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( vk::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledRectangularLines = stippledRectangularLines_;
return *this;
}
- PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( vk::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledBresenhamLines = stippledBresenhamLines_;
return *this;
}
- PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( vk::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
{
stippledSmoothLines = stippledSmoothLines_;
return *this;
@@ -49327,52 +45861,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceLineRasterizationFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
+ VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
};
static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
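[note] Because every member above is now value-initialized via `= {}`, a features struct can be built with the fluent setters, flipping on only the requested modes; a minimal sketch (the function name is illustrative):

    #include <vulkan/vulkan.hpp>

    vk::PhysicalDeviceLineRasterizationFeaturesEXT makeLineFeatures()
    {
        // Everything not set explicitly stays VK_FALSE from the {} initializers.
        return vk::PhysicalDeviceLineRasterizationFeaturesEXT()
            .setBresenhamLines( VK_TRUE )
            .setStippledBresenhamLines( VK_TRUE );
    }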
- namespace layout
+ struct PhysicalDeviceLineRasterizationPropertiesEXT
{
- struct PhysicalDeviceLineRasterizationPropertiesEXT
- {
- protected:
- PhysicalDeviceLineRasterizationPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
- void* pNext = nullptr;
- uint32_t lineSubPixelPrecisionBits;
- };
- static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceLineRasterizationPropertiesEXT : public layout::PhysicalDeviceLineRasterizationPropertiesEXT
- {
- PhysicalDeviceLineRasterizationPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceLineRasterizationPropertiesEXT()
+ PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT
+ : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceLineRasterizationPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceLineRasterizationPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>(&rhs);
return *this;
}
@@ -49398,53 +45919,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceLineRasterizationPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+ void* pNext = {};
+ uint32_t lineSubPixelPrecisionBits = {};
};
static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceMaintenance3Properties
- {
- protected:
- PhysicalDeviceMaintenance3Properties() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>(this) = rhs;
- }
-
- PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
- void* pNext = nullptr;
- uint32_t maxPerSetDescriptors;
- vk::DeviceSize maxMemoryAllocationSize;
- };
- static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceMaintenance3Properties : public layout::PhysicalDeviceMaintenance3Properties
+ struct PhysicalDeviceMaintenance3Properties
{
- PhysicalDeviceMaintenance3Properties() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMaintenance3Properties()
+ PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxPerSetDescriptors( maxPerSetDescriptors_ )
+ , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) - offsetof( PhysicalDeviceMaintenance3Properties, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMaintenance3Properties( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMaintenance3Properties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>(&rhs);
return *this;
}
@@ -49471,53 +45975,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMaintenance3Properties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
+ void* pNext = {};
+ uint32_t maxPerSetDescriptors = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
};
static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
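[note] Properties structs like this one are filled through the pNext chain of vkGetPhysicalDeviceProperties2; a minimal sketch, assuming Vulkan 1.1 (or VK_KHR_get_physical_device_properties2 plus VK_KHR_maintenance3):

    #include <vulkan/vulkan.hpp>

    vk::DeviceSize maxAllocationSize( vk::PhysicalDevice physicalDevice )
    {
        vk::PhysicalDeviceMaintenance3Properties maint3{};
        vk::PhysicalDeviceProperties2 props2{};
        props2.pNext = &maint3;                   // chain the extension struct
        physicalDevice.getProperties2( &props2 ); // driver fills both structs
        return maint3.maxMemoryAllocationSize;
    }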
- namespace layout
+ struct PhysicalDeviceMemoryBudgetPropertiesEXT
{
- struct PhysicalDeviceMemoryBudgetPropertiesEXT
+ PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapBudget_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT
+ : heapBudget{}
+ , heapUsage{}
{
- protected:
- PhysicalDeviceMemoryBudgetPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
- void* pNext = nullptr;
- vk::DeviceSize heapBudget[VK_MAX_MEMORY_HEAPS];
- vk::DeviceSize heapUsage[VK_MAX_MEMORY_HEAPS];
- };
- static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( heapBudget, heapBudget_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( heapUsage, heapUsage_ );
+ }
- struct PhysicalDeviceMemoryBudgetPropertiesEXT : public layout::PhysicalDeviceMemoryBudgetPropertiesEXT
- {
- PhysicalDeviceMemoryBudgetPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMemoryBudgetPropertiesEXT()
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) - offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) );
+ return *this;
+ }
PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMemoryBudgetPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMemoryBudgetPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>(&rhs);
return *this;
}
@@ -49535,8 +46026,8 @@ namespace VULKAN_HPP_NAMESPACE
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && ( memcmp( heapBudget, rhs.heapBudget, VK_MAX_MEMORY_HEAPS * sizeof( vk::DeviceSize ) ) == 0 )
- && ( memcmp( heapUsage, rhs.heapUsage, VK_MAX_MEMORY_HEAPS * sizeof( vk::DeviceSize ) ) == 0 );
+ && ( memcmp( heapBudget, rhs.heapBudget, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 )
+ && ( memcmp( heapUsage, rhs.heapUsage, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 );
}
bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -49544,53 +46035,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMemoryBudgetPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize heapBudget[VK_MAX_MEMORY_HEAPS] = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize heapUsage[VK_MAX_MEMORY_HEAPS] = {};
};
static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
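[note] heapBudget/heapUsage are indexed like VkPhysicalDeviceMemoryProperties::memoryHeaps, and entries past memoryHeapCount stay zero-initialized; a minimal query sketch, assuming VK_EXT_memory_budget is supported and enabled:

    #include <vulkan/vulkan.hpp>

    // Remaining budget (in bytes) on one memory heap.
    vk::DeviceSize heapHeadroom( vk::PhysicalDevice physicalDevice, uint32_t heapIndex )
    {
        vk::PhysicalDeviceMemoryBudgetPropertiesEXT budget{};
        vk::PhysicalDeviceMemoryProperties2 memProps2{};
        memProps2.pNext = &budget;
        physicalDevice.getMemoryProperties2( &memProps2 );
        return budget.heapBudget[heapIndex] - budget.heapUsage[heapIndex];
    }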
- namespace layout
- {
- struct PhysicalDeviceMemoryPriorityFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( vk::Bool32 memoryPriority_ = 0 ) VULKAN_HPP_NOEXCEPT
- : memoryPriority( memoryPriority_ )
- {}
-
- PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 memoryPriority;
- };
- static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceMemoryPriorityFeaturesEXT : public layout::PhysicalDeviceMemoryPriorityFeaturesEXT
+ struct PhysicalDeviceMemoryPriorityFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( vk::Bool32 memoryPriority_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMemoryPriorityFeaturesEXT( memoryPriority_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memoryPriority( memoryPriority_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) - offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMemoryPriorityFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMemoryPriorityFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>(&rhs);
return *this;
}
@@ -49600,7 +46073,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( vk::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
{
memoryPriority = memoryPriority_;
return *this;
@@ -49628,25 +46101,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMemoryPriorityFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
};
static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
struct PhysicalDeviceMemoryProperties
{
- PhysicalDeviceMemoryProperties() VULKAN_HPP_NOEXCEPT
- {}
+ PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {},
+ uint32_t memoryHeapCount_ = {},
+ std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memoryTypeCount( memoryTypeCount_ )
+ , memoryTypes{}
+ , memoryHeapCount( memoryHeapCount_ )
+ , memoryHeaps{}
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES,VK_MAX_MEMORY_TYPES>::copy( memoryTypes, memoryTypes_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS,VK_MAX_MEMORY_HEAPS>::copy( memoryHeaps, memoryHeaps_ );
+ }
PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>(this) = rhs;
+ *this = rhs;
}
PhysicalDeviceMemoryProperties& operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>(&rhs);
return *this;
}
@@ -49663,9 +46148,9 @@ namespace VULKAN_HPP_NAMESPACE
bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( memoryTypeCount == rhs.memoryTypeCount )
- && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( vk::MemoryType ) ) == 0 )
+ && ( memcmp( memoryTypes, rhs.memoryTypes, std::min<uint32_t>( VK_MAX_MEMORY_TYPES, memoryTypeCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 )
&& ( memoryHeapCount == rhs.memoryHeapCount )
- && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( vk::MemoryHeap ) ) == 0 );
+ && ( memcmp( memoryHeaps, rhs.memoryHeaps, std::min<uint32_t>( VK_MAX_MEMORY_HEAPS, memoryHeapCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 );
}
bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -49674,54 +46159,34 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t memoryTypeCount;
- vk::MemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
- uint32_t memoryHeapCount;
- vk::MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
+ uint32_t memoryTypeCount = {};
+ VULKAN_HPP_NAMESPACE::MemoryType memoryTypes[VK_MAX_MEMORY_TYPES] = {};
+ uint32_t memoryHeapCount = {};
+ VULKAN_HPP_NAMESPACE::MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS] = {};
};
static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
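[note] Only the first memoryTypeCount/memoryHeapCount entries are meaningful, which is also why the operator== above clamps its memcmp with std::min; the same bound drives the classic memory-type search. A minimal sketch (typeBits and required are illustrative parameters):

    #include <vulkan/vulkan.hpp>
    #include <cstdint>

    // First memory type allowed by `typeBits` that carries all `required` flags.
    uint32_t findMemoryType( vk::PhysicalDeviceMemoryProperties const & memProps,
                             uint32_t typeBits, vk::MemoryPropertyFlags required )
    {
        for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
        {
            if ( ( typeBits & ( 1u << i ) ) &&
                 ( memProps.memoryTypes[i].propertyFlags & required ) == required )
            {
                return i;
            }
        }
        return UINT32_MAX;  // no match; callers should treat this as an error
    }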
- namespace layout
+ struct PhysicalDeviceMemoryProperties2
{
- struct PhysicalDeviceMemoryProperties2
- {
- protected:
- PhysicalDeviceMemoryProperties2() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>(this) = rhs;
- }
-
- PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
- void* pNext = nullptr;
- vk::PhysicalDeviceMemoryProperties memoryProperties;
- };
- static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceMemoryProperties2 : public layout::PhysicalDeviceMemoryProperties2
- {
- PhysicalDeviceMemoryProperties2() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMemoryProperties2()
+ PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memoryProperties( memoryProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMemoryProperties2( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMemoryProperties2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>(&rhs);
return *this;
}
@@ -49747,57 +46212,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMemoryProperties2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
};
static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceMeshShaderFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( vk::Bool32 taskShader_ = 0,
- vk::Bool32 meshShader_ = 0 ) VULKAN_HPP_NOEXCEPT
- : taskShader( taskShader_ )
- , meshShader( meshShader_ )
- {}
-
- PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 taskShader;
- vk::Bool32 meshShader;
- };
- static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceMeshShaderFeaturesNV : public layout::PhysicalDeviceMeshShaderFeaturesNV
+ struct PhysicalDeviceMeshShaderFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( vk::Bool32 taskShader_ = 0,
- vk::Bool32 meshShader_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMeshShaderFeaturesNV( taskShader_, meshShader_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT
+ : taskShader( taskShader_ )
+ , meshShader( meshShader_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMeshShaderFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMeshShaderFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>(&rhs);
return *this;
}
@@ -49807,13 +46251,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( vk::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
{
taskShader = taskShader_;
return *this;
}
- PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( vk::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
{
meshShader = meshShader_;
return *this;
@@ -49842,64 +46286,62 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMeshShaderFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
};
static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
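[note] Feature structs like this one are handed back to the driver through VkDeviceCreateInfo::pNext to enable the features at device creation; a minimal sketch, assuming VK_NV_mesh_shader is in the enabled extension list (extension setup omitted):

    #include <vulkan/vulkan.hpp>

    vk::Device createMeshShaderDevice( vk::PhysicalDevice physicalDevice,
                                       vk::DeviceQueueCreateInfo const & queueInfo )
    {
        vk::PhysicalDeviceMeshShaderFeaturesNV meshFeatures{};
        meshFeatures.setTaskShader( VK_TRUE ).setMeshShader( VK_TRUE );

        vk::DeviceCreateInfo createInfo{};
        createInfo.setPNext( &meshFeatures )      // enable the features via the pNext chain
                  .setQueueCreateInfoCount( 1 )
                  .setPQueueCreateInfos( &queueInfo );
        return physicalDevice.createDevice( createInfo );
    }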
- namespace layout
- {
- struct PhysicalDeviceMeshShaderPropertiesNV
- {
- protected:
- PhysicalDeviceMeshShaderPropertiesNV() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>(this) = rhs;
- }
-
- PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
- void* pNext = nullptr;
- uint32_t maxDrawMeshTasksCount;
- uint32_t maxTaskWorkGroupInvocations;
- uint32_t maxTaskWorkGroupSize[3];
- uint32_t maxTaskTotalMemorySize;
- uint32_t maxTaskOutputCount;
- uint32_t maxMeshWorkGroupInvocations;
- uint32_t maxMeshWorkGroupSize[3];
- uint32_t maxMeshTotalMemorySize;
- uint32_t maxMeshOutputVertices;
- uint32_t maxMeshOutputPrimitives;
- uint32_t maxMeshMultiviewViewCount;
- uint32_t meshOutputPerVertexGranularity;
- uint32_t meshOutputPerPrimitiveGranularity;
- };
- static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceMeshShaderPropertiesNV : public layout::PhysicalDeviceMeshShaderPropertiesNV
- {
- PhysicalDeviceMeshShaderPropertiesNV() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMeshShaderPropertiesNV()
- {}
+ struct PhysicalDeviceMeshShaderPropertiesNV
+ {
+ PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {},
+ uint32_t maxTaskWorkGroupInvocations_ = {},
+ std::array<uint32_t,3> const& maxTaskWorkGroupSize_ = {},
+ uint32_t maxTaskTotalMemorySize_ = {},
+ uint32_t maxTaskOutputCount_ = {},
+ uint32_t maxMeshWorkGroupInvocations_ = {},
+ std::array<uint32_t,3> const& maxMeshWorkGroupSize_ = {},
+ uint32_t maxMeshTotalMemorySize_ = {},
+ uint32_t maxMeshOutputVertices_ = {},
+ uint32_t maxMeshOutputPrimitives_ = {},
+ uint32_t maxMeshMultiviewViewCount_ = {},
+ uint32_t meshOutputPerVertexGranularity_ = {},
+ uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
+ , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
+ , maxTaskWorkGroupSize{}
+ , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
+ , maxTaskOutputCount( maxTaskOutputCount_ )
+ , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
+ , maxMeshWorkGroupSize{}
+ , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
+ , maxMeshOutputVertices( maxMeshOutputVertices_ )
+ , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
+ , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
+ , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
+ , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxTaskWorkGroupSize, maxTaskWorkGroupSize_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxMeshWorkGroupSize, maxMeshWorkGroupSize_ );
+ }
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) - offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) );
+ return *this;
+ }
PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMeshShaderPropertiesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMeshShaderPropertiesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>(&rhs);
return *this;
}
@@ -49937,61 +46379,50 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMeshShaderPropertiesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+ void* pNext = {};
+ uint32_t maxDrawMeshTasksCount = {};
+ uint32_t maxTaskWorkGroupInvocations = {};
+ uint32_t maxTaskWorkGroupSize[3] = {};
+ uint32_t maxTaskTotalMemorySize = {};
+ uint32_t maxTaskOutputCount = {};
+ uint32_t maxMeshWorkGroupInvocations = {};
+ uint32_t maxMeshWorkGroupSize[3] = {};
+ uint32_t maxMeshTotalMemorySize = {};
+ uint32_t maxMeshOutputVertices = {};
+ uint32_t maxMeshOutputPrimitives = {};
+ uint32_t maxMeshMultiviewViewCount = {};
+ uint32_t meshOutputPerVertexGranularity = {};
+ uint32_t meshOutputPerPrimitiveGranularity = {};
};
static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
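[note] The new constructor above takes std::array parameters and copies them into the C-array members through ConstExpression1DArrayCopy; a brief illustration of that calling convention (all values are made up, and in practice the driver fills this struct, so hand construction is mainly useful in tests):

    #include <vulkan/vulkan.hpp>
    #include <array>

    void constructorSketch()
    {
        std::array<uint32_t, 3> taskSize = { { 32, 1, 1 } };
        vk::PhysicalDeviceMeshShaderPropertiesNV meshProps( /*maxDrawMeshTasksCount_*/       65535,
                                                            /*maxTaskWorkGroupInvocations_*/ 32,
                                                            taskSize );
        // meshProps.maxTaskWorkGroupSize[0] is now 32; all later parameters
        // keep their {} defaults from the default arguments.
    }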
- namespace layout
- {
- struct PhysicalDeviceMultiviewFeatures
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( vk::Bool32 multiview_ = 0,
- vk::Bool32 multiviewGeometryShader_ = 0,
- vk::Bool32 multiviewTessellationShader_ = 0 ) VULKAN_HPP_NOEXCEPT
- : multiview( multiview_ )
- , multiviewGeometryShader( multiviewGeometryShader_ )
- , multiviewTessellationShader( multiviewTessellationShader_ )
- {}
-
- PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>(this) = rhs;
- }
-
- PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
- void* pNext = nullptr;
- vk::Bool32 multiview;
- vk::Bool32 multiviewGeometryShader;
- vk::Bool32 multiviewTessellationShader;
- };
- static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceMultiviewFeatures : public layout::PhysicalDeviceMultiviewFeatures
+ struct PhysicalDeviceMultiviewFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( vk::Bool32 multiview_ = 0,
- vk::Bool32 multiviewGeometryShader_ = 0,
- vk::Bool32 multiviewTessellationShader_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMultiviewFeatures( multiview_, multiviewGeometryShader_, multiviewTessellationShader_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT
+ : multiview( multiview_ )
+ , multiviewGeometryShader( multiviewGeometryShader_ )
+ , multiviewTessellationShader( multiviewTessellationShader_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMultiviewFeatures( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMultiviewFeatures::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>(&rhs);
return *this;
}
@@ -50001,19 +46432,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceMultiviewFeatures & setMultiview( vk::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
{
multiview = multiview_;
return *this;
}
- PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( vk::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewGeometryShader = multiviewGeometryShader_;
return *this;
}
- PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( vk::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewTessellationShader = multiviewTessellationShader_;
return *this;
@@ -50043,52 +46474,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMultiviewFeatures::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
};
static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
- {
- protected:
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>(this) = rhs;
- }
-
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
- void* pNext = nullptr;
- vk::Bool32 perViewPositionAllComponents;
- };
- static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : public layout::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+ struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
{
- PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX()
+ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT
+ : perViewPositionAllComponents( perViewPositionAllComponents_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>(&rhs);
return *this;
}
@@ -50114,53 +46529,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
};
static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceMultiviewProperties
{
- struct PhysicalDeviceMultiviewProperties
- {
- protected:
- PhysicalDeviceMultiviewProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>(this) = rhs;
- }
-
- PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
- void* pNext = nullptr;
- uint32_t maxMultiviewViewCount;
- uint32_t maxMultiviewInstanceIndex;
- };
- static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceMultiviewProperties : public layout::PhysicalDeviceMultiviewProperties
- {
- PhysicalDeviceMultiviewProperties() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMultiviewProperties()
+ PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {},
+ uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxMultiviewViewCount( maxMultiviewViewCount_ )
+ , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceMultiviewProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceMultiviewProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>(&rhs);
return *this;
}
@@ -50187,55 +46585,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceMultiviewProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
+ void* pNext = {};
+ uint32_t maxMultiviewViewCount = {};
+ uint32_t maxMultiviewInstanceIndex = {};
};
static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
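The generated operator= above starts its memcpy at pNext rather than at the struct head; a small sketch of the resulting semantics (sType is const and survives assignment):

    #include <cassert>
    #include <vulkan/vulkan.hpp>

    void copySemantics( vk::PhysicalDeviceMultiviewProperties const & src )
    {
      vk::PhysicalDeviceMultiviewProperties dst;
      dst = src;  // copies pNext and the data members, leaves the const sType intact
      assert( dst.sType == vk::StructureType::ePhysicalDeviceMultiviewProperties );
      assert( dst.maxMultiviewViewCount == src.maxMultiviewViewCount );
    }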
- namespace layout
+ struct PhysicalDevicePCIBusInfoPropertiesEXT
{
- struct PhysicalDevicePCIBusInfoPropertiesEXT
- {
- protected:
- PhysicalDevicePCIBusInfoPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
- void* pNext = nullptr;
- uint32_t pciDomain;
- uint32_t pciBus;
- uint32_t pciDevice;
- uint32_t pciFunction;
- };
- static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDevicePCIBusInfoPropertiesEXT : public layout::PhysicalDevicePCIBusInfoPropertiesEXT
- {
- PhysicalDevicePCIBusInfoPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevicePCIBusInfoPropertiesEXT()
+ PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {},
+ uint32_t pciBus_ = {},
+ uint32_t pciDevice_ = {},
+ uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pciDomain( pciDomain_ )
+ , pciBus( pciBus_ )
+ , pciDevice( pciDevice_ )
+ , pciFunction( pciFunction_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) - offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevicePCIBusInfoPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDevicePCIBusInfoPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>(&rhs);
return *this;
}
@@ -50264,53 +46648,165 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDevicePCIBusInfoPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+ void* pNext = {};
+ uint32_t pciDomain = {};
+ uint32_t pciBus = {};
+ uint32_t pciDevice = {};
+ uint32_t pciFunction = {};
};
static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
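A sketch of the C/C++ round trip that the converting constructor and conversion operators above enable; `cProps` is an assumed, already-filled C struct:

    #include <vulkan/vulkan.hpp>

    void roundTrip( VkPhysicalDevicePCIBusInfoPropertiesEXT const & cProps )
    {
      vk::PhysicalDevicePCIBusInfoPropertiesEXT cppProps( cProps );  // C -> C++ wrapper
      VkPhysicalDevicePCIBusInfoPropertiesEXT back =
          static_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT>( cppProps );  // C++ -> C
      (void)back;
    }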
- namespace layout
+ struct PhysicalDevicePerformanceQueryFeaturesKHR
{
- struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT
+ : performanceCounterQueryPools( performanceCounterQueryPools_ )
+ , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( vk::Bool32 pipelineExecutableInfo_ = 0 ) VULKAN_HPP_NOEXCEPT
- : pipelineExecutableInfo( pipelineExecutableInfo_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) - offsetof( PhysicalDevicePerformanceQueryFeaturesKHR, pNext ) );
+ return *this;
+ }
- PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>(this) = rhs;
- }
+ PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>(this) = rhs;
- return *this;
- }
+ PhysicalDevicePerformanceQueryFeaturesKHR& operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>(&rhs);
+ return *this;
+ }
- public:
- vk::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 pipelineExecutableInfo;
- };
- static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
+ PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
+ {
+ performanceCounterQueryPools = performanceCounterQueryPools_;
+ return *this;
+ }
+
+ PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
+ {
+ performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
+ return *this;
+ }
+
+ operator VkPhysicalDevicePerformanceQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
+ }
+
+ operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
+ }
+
+ bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools )
+ && ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
+ }
- struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR : public layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+ bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
+ VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
+ };
+ static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
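The features struct keeps the fluent setters, so it can be filled and hung off a DeviceCreateInfo chain; a sketch, with `createInfo` and extension support assumed:

    #include <vulkan/vulkan.hpp>

    vk::DeviceCreateInfo enablePerformanceQuery( vk::DeviceCreateInfo createInfo,
                                                 vk::PhysicalDevicePerformanceQueryFeaturesKHR & perfQuery )
    {
      perfQuery.setPerformanceCounterQueryPools( VK_TRUE )
               .setPerformanceCounterMultipleQueryPools( VK_FALSE );
      createInfo.setPNext( &perfQuery );  // perfQuery must outlive device creation
      return createInfo;
    }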
+
+ struct PhysicalDevicePerformanceQueryPropertiesKHR
{
- VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( vk::Bool32 pipelineExecutableInfo_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( pipelineExecutableInfo_ )
+ PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT
+ : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
{}
- PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( rhs )
+ VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) - offsetof( PhysicalDevicePerformanceQueryPropertiesKHR, pNext ) );
+ return *this;
+ }
+
+ PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ PhysicalDevicePerformanceQueryPropertiesKHR& operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>(&rhs);
+ return *this;
+ }
+
+ operator VkPhysicalDevicePerformanceQueryPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
+ }
+
+ operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
+ }
+
+ bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
+ }
+
+ bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
+ };
+ static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+ struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+ {
+ VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pipelineExecutableInfo( pipelineExecutableInfo_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) );
+ return *this;
+ }
+
+ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>(&rhs);
return *this;
}
@@ -50320,7 +46816,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( vk::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
{
pipelineExecutableInfo = pipelineExecutableInfo_;
return *this;
@@ -50348,52 +46844,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
};
static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
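Since this constructor stays VULKAN_HPP_CONSTEXPR, the wrapper can be built at compile time where that macro expands to constexpr; a sketch:

    #include <vulkan/vulkan.hpp>

    // pipelineExecutableInfo defaults to VK_FALSE via the {} initializer
    VULKAN_HPP_CONSTEXPR vk::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
        defaultPipelineExecutableFeatures{};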
- namespace layout
- {
- struct PhysicalDevicePointClippingProperties
- {
- protected:
- PhysicalDevicePointClippingProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>(this) = rhs;
- }
-
- PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
- void* pNext = nullptr;
- vk::PointClippingBehavior pointClippingBehavior;
- };
- static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDevicePointClippingProperties : public layout::PhysicalDevicePointClippingProperties
+ struct PhysicalDevicePointClippingProperties
{
- PhysicalDevicePointClippingProperties() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevicePointClippingProperties()
+ PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT
+ : pointClippingBehavior( pointClippingBehavior_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) - offsetof( PhysicalDevicePointClippingProperties, pNext ) );
+ return *this;
+ }
+
PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevicePointClippingProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDevicePointClippingProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>(&rhs);
return *this;
}
@@ -50419,25 +46897,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDevicePointClippingProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
};
static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
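Note the new in-class default: a freshly constructed wrapper reports eAllClipPlanes (the enum's first value) until a query overwrites it. A sketch:

    #include <vulkan/vulkan.hpp>

    bool defaultIsAllClipPlanes()
    {
      vk::PhysicalDevicePointClippingProperties props;  // not yet filled by a query
      return props.pointClippingBehavior == vk::PointClippingBehavior::eAllClipPlanes;  // true
    }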
struct PhysicalDeviceSparseProperties
{
- PhysicalDeviceSparseProperties() VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
+ : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
+ , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
+ , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
+ , residencyAlignedMipSize( residencyAlignedMipSize_ )
+ , residencyNonResidentStrict( residencyNonResidentStrict_ )
{}
PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceSparseProperties*>(this) = rhs;
+ *this = rhs;
}
PhysicalDeviceSparseProperties& operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceSparseProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>(&rhs);
return *this;
}
@@ -50466,28 +46955,48 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Bool32 residencyStandard2DBlockShape;
- vk::Bool32 residencyStandard2DMultisampleBlockShape;
- vk::Bool32 residencyStandard3DBlockShape;
- vk::Bool32 residencyAlignedMipSize;
- vk::Bool32 residencyNonResidentStrict;
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
+ VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
+ VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
+ VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
};
static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
struct PhysicalDeviceProperties
{
- PhysicalDeviceProperties() VULKAN_HPP_NOEXCEPT
- {}
+ PhysicalDeviceProperties( uint32_t apiVersion_ = {},
+ uint32_t driverVersion_ = {},
+ uint32_t vendorID_ = {},
+ uint32_t deviceID_ = {},
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
+ std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = {},
+ std::array<uint8_t,VK_UUID_SIZE> const& pipelineCacheUUID_ = {},
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : apiVersion( apiVersion_ )
+ , driverVersion( driverVersion_ )
+ , vendorID( vendorID_ )
+ , deviceID( deviceID_ )
+ , deviceType( deviceType_ )
+ , deviceName{}
+ , pipelineCacheUUID{}
+ , limits( limits_ )
+ , sparseProperties( sparseProperties_ )
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE>::copy( deviceName, deviceName_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( pipelineCacheUUID, pipelineCacheUUID_ );
+ }
PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceProperties*>(this) = rhs;
+ *this = rhs;
}
PhysicalDeviceProperties& operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPhysicalDeviceProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>(&rhs);
return *this;
}
@@ -50520,59 +47029,39 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t apiVersion;
- uint32_t driverVersion;
- uint32_t vendorID;
- uint32_t deviceID;
- vk::PhysicalDeviceType deviceType;
- char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
- uint8_t pipelineCacheUUID[VK_UUID_SIZE];
- vk::PhysicalDeviceLimits limits;
- vk::PhysicalDeviceSparseProperties sparseProperties;
+ uint32_t apiVersion = {};
+ uint32_t driverVersion = {};
+ uint32_t vendorID = {};
+ uint32_t deviceID = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
+ char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE] = {};
+ uint8_t pipelineCacheUUID[VK_UUID_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
};
static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
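Reading the struct is unchanged by the refactor; a sketch assuming `physicalDevice` is a valid vk::PhysicalDevice:

    #include <cstdio>
    #include <vulkan/vulkan.hpp>

    void printDeviceInfo( vk::PhysicalDevice physicalDevice )
    {
      vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
      std::printf( "%s (Vulkan %u.%u.%u)\n", props.deviceName,
                   VK_VERSION_MAJOR( props.apiVersion ),
                   VK_VERSION_MINOR( props.apiVersion ),
                   VK_VERSION_PATCH( props.apiVersion ) );
    }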
- namespace layout
- {
- struct PhysicalDeviceProperties2
- {
- protected:
- PhysicalDeviceProperties2() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceProperties2*>(this) = rhs;
- }
-
- PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceProperties2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceProperties2;
- void* pNext = nullptr;
- vk::PhysicalDeviceProperties properties;
- };
- static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceProperties2 : public layout::PhysicalDeviceProperties2
+ struct PhysicalDeviceProperties2
{
- PhysicalDeviceProperties2() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceProperties2()
+ PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : properties( properties_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) - offsetof( PhysicalDeviceProperties2, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceProperties2( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceProperties2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>(&rhs);
return *this;
}
@@ -50598,53 +47087,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceProperties2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
};
static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
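PhysicalDeviceProperties2 is the root for extended queries; vulkan.hpp's StructureChain wires the sType/pNext plumbing automatically. A sketch pulling the push-descriptor limit defined further below:

    #include <vulkan/vulkan.hpp>

    uint32_t queryMaxPushDescriptors( vk::PhysicalDevice physicalDevice )
    {
      auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                 vk::PhysicalDevicePushDescriptorPropertiesKHR>();
      return chain.get<vk::PhysicalDevicePushDescriptorPropertiesKHR>().maxPushDescriptors;
    }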
- namespace layout
- {
- struct PhysicalDeviceProtectedMemoryFeatures
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( vk::Bool32 protectedMemory_ = 0 ) VULKAN_HPP_NOEXCEPT
- : protectedMemory( protectedMemory_ )
- {}
-
- PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>(this) = rhs;
- }
-
- PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
- void* pNext = nullptr;
- vk::Bool32 protectedMemory;
- };
- static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceProtectedMemoryFeatures : public layout::PhysicalDeviceProtectedMemoryFeatures
+ struct PhysicalDeviceProtectedMemoryFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( vk::Bool32 protectedMemory_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceProtectedMemoryFeatures( protectedMemory_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT
+ : protectedMemory( protectedMemory_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) - offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceProtectedMemoryFeatures( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceProtectedMemoryFeatures::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>(&rhs);
return *this;
}
@@ -50654,7 +47124,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( vk::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
{
protectedMemory = protectedMemory_;
return *this;
@@ -50682,52 +47152,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceProtectedMemoryFeatures::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
};
static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
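Feature structs chain into PhysicalDeviceFeatures2 the same way the property structs chain into PhysicalDeviceProperties2; a sketch assuming a Vulkan 1.1 physical device:

    #include <vulkan/vulkan.hpp>

    bool supportsProtectedMemory( vk::PhysicalDevice physicalDevice )
    {
      vk::PhysicalDeviceProtectedMemoryFeatures protectedMemoryFeatures;
      vk::PhysicalDeviceFeatures2 features2;
      features2.pNext = &protectedMemoryFeatures;
      physicalDevice.getFeatures2( &features2 );
      return protectedMemoryFeatures.protectedMemory == VK_TRUE;
    }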
- namespace layout
- {
- struct PhysicalDeviceProtectedMemoryProperties
- {
- protected:
- PhysicalDeviceProtectedMemoryProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>(this) = rhs;
- }
-
- PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
- void* pNext = nullptr;
- vk::Bool32 protectedNoFault;
- };
- static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceProtectedMemoryProperties : public layout::PhysicalDeviceProtectedMemoryProperties
+ struct PhysicalDeviceProtectedMemoryProperties
{
- PhysicalDeviceProtectedMemoryProperties() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceProtectedMemoryProperties()
+ PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
+ : protectedNoFault( protectedNoFault_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) - offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceProtectedMemoryProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceProtectedMemoryProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>(&rhs);
return *this;
}
@@ -50753,52 +47205,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceProtectedMemoryProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
};
static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDevicePushDescriptorPropertiesKHR
{
- struct PhysicalDevicePushDescriptorPropertiesKHR
- {
- protected:
- PhysicalDevicePushDescriptorPropertiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>(this) = rhs;
- }
-
- PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
- void* pNext = nullptr;
- uint32_t maxPushDescriptors;
- };
- static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDevicePushDescriptorPropertiesKHR : public layout::PhysicalDevicePushDescriptorPropertiesKHR
- {
- PhysicalDevicePushDescriptorPropertiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevicePushDescriptorPropertiesKHR()
+ PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxPushDescriptors( maxPushDescriptors_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) - offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) );
+ return *this;
+ }
+
PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDevicePushDescriptorPropertiesKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDevicePushDescriptorPropertiesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>(&rhs);
return *this;
}
@@ -50824,59 +47258,48 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDevicePushDescriptorPropertiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+ void* pNext = {};
+ uint32_t maxPushDescriptors = {};
};
static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceRayTracingPropertiesNV
{
- struct PhysicalDeviceRayTracingPropertiesNV
- {
- protected:
- PhysicalDeviceRayTracingPropertiesNV() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>(this) = rhs;
- }
-
- PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
- void* pNext = nullptr;
- uint32_t shaderGroupHandleSize;
- uint32_t maxRecursionDepth;
- uint32_t maxShaderGroupStride;
- uint32_t shaderGroupBaseAlignment;
- uint64_t maxGeometryCount;
- uint64_t maxInstanceCount;
- uint64_t maxTriangleCount;
- uint32_t maxDescriptorSetAccelerationStructures;
- };
- static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceRayTracingPropertiesNV : public layout::PhysicalDeviceRayTracingPropertiesNV
- {
- PhysicalDeviceRayTracingPropertiesNV() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceRayTracingPropertiesNV()
+ PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {},
+ uint32_t maxRecursionDepth_ = {},
+ uint32_t maxShaderGroupStride_ = {},
+ uint32_t shaderGroupBaseAlignment_ = {},
+ uint64_t maxGeometryCount_ = {},
+ uint64_t maxInstanceCount_ = {},
+ uint64_t maxTriangleCount_ = {},
+ uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderGroupHandleSize( shaderGroupHandleSize_ )
+ , maxRecursionDepth( maxRecursionDepth_ )
+ , maxShaderGroupStride( maxShaderGroupStride_ )
+ , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
+ , maxGeometryCount( maxGeometryCount_ )
+ , maxInstanceCount( maxInstanceCount_ )
+ , maxTriangleCount( maxTriangleCount_ )
+ , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) - offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceRayTracingPropertiesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceRayTracingPropertiesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>(&rhs);
return *this;
}
@@ -50909,53 +47332,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceRayTracingPropertiesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+ void* pNext = {};
+ uint32_t shaderGroupHandleSize = {};
+ uint32_t maxRecursionDepth = {};
+ uint32_t maxShaderGroupStride = {};
+ uint32_t shaderGroupBaseAlignment = {};
+ uint64_t maxGeometryCount = {};
+ uint64_t maxInstanceCount = {};
+ uint64_t maxTriangleCount = {};
+ uint32_t maxDescriptorSetAccelerationStructures = {};
};
static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
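The queried NV ray tracing limits feed directly into shader binding table sizing; a sketch, with `groupCount` an assumed shader-group count:

    #include <vulkan/vulkan.hpp>

    vk::DeviceSize shaderBindingTableSize( vk::PhysicalDeviceRayTracingPropertiesNV const & props,
                                           uint32_t groupCount )
    {
      // one handle of shaderGroupHandleSize bytes per shader group
      return static_cast<vk::DeviceSize>( props.shaderGroupHandleSize ) * groupCount;
    }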
- namespace layout
+ struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
{
- struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( vk::Bool32 representativeFragmentTest_ = 0 ) VULKAN_HPP_NOEXCEPT
- : representativeFragmentTest( representativeFragmentTest_ )
- {}
-
- PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 representativeFragmentTest;
- };
- static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV : public layout::PhysicalDeviceRepresentativeFragmentTestFeaturesNV
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( vk::Bool32 representativeFragmentTest_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceRepresentativeFragmentTestFeaturesNV( representativeFragmentTest_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT
+ : representativeFragmentTest( representativeFragmentTest_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceRepresentativeFragmentTestFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceRepresentativeFragmentTestFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>(&rhs);
return *this;
}
@@ -50965,7 +47376,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( vk::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
{
representativeFragmentTest = representativeFragmentTest_;
return *this;
@@ -50993,56 +47404,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceRepresentativeFragmentTestFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
};
static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceSampleLocationsPropertiesEXT
{
- struct PhysicalDeviceSampleLocationsPropertiesEXT
+ PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {},
+ std::array<float,2> const& sampleLocationCoordinateRange_ = {},
+ uint32_t sampleLocationSubPixelBits_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
+ : sampleLocationSampleCounts( sampleLocationSampleCounts_ )
+ , maxSampleLocationGridSize( maxSampleLocationGridSize_ )
+ , sampleLocationCoordinateRange{}
+ , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
+ , variableSampleLocations( variableSampleLocations_ )
{
- protected:
- PhysicalDeviceSampleLocationsPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
- void* pNext = nullptr;
- vk::SampleCountFlags sampleLocationSampleCounts;
- vk::Extent2D maxSampleLocationGridSize;
- float sampleLocationCoordinateRange[2];
- uint32_t sampleLocationSubPixelBits;
- vk::Bool32 variableSampleLocations;
- };
- static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( sampleLocationCoordinateRange, sampleLocationCoordinateRange_ );
+ }
- struct PhysicalDeviceSampleLocationsPropertiesEXT : public layout::PhysicalDeviceSampleLocationsPropertiesEXT
- {
- PhysicalDeviceSampleLocationsPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSampleLocationsPropertiesEXT()
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) - offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) );
+ return *this;
+ }
PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSampleLocationsPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSampleLocationsPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>(&rhs);
return *this;
}
@@ -51072,67 +47471,54 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSampleLocationsPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
+ float sampleLocationCoordinateRange[2] = {};
+ uint32_t sampleLocationSubPixelBits = {};
+ VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
};
static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
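The new constructor takes a std::array for the fixed-size float[2] member and copies it through ConstExpression1DArrayCopy; a sketch with illustrative values:

    #include <vulkan/vulkan.hpp>

    vk::PhysicalDeviceSampleLocationsPropertiesEXT makeSampleLocationProps()
    {
      return vk::PhysicalDeviceSampleLocationsPropertiesEXT(
          vk::SampleCountFlagBits::e4,   // sampleLocationSampleCounts
          vk::Extent2D( 2, 2 ),          // maxSampleLocationGridSize
          { { 0.0f, 1.0f } },            // sampleLocationCoordinateRange
          4,                             // sampleLocationSubPixelBits
          VK_TRUE );                     // variableSampleLocations
    }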
- namespace layout
- {
- struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT
- {
- protected:
- PhysicalDeviceSamplerFilterMinmaxPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceSamplerFilterMinmaxPropertiesEXT& operator=( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
- void* pNext = nullptr;
- vk::Bool32 filterMinmaxSingleComponentFormats;
- vk::Bool32 filterMinmaxImageComponentMapping;
- };
- static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT : public layout::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT
+ struct PhysicalDeviceSamplerFilterMinmaxProperties
{
- PhysicalDeviceSamplerFilterMinmaxPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT()
+ PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT
+ : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
+ , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
{}
- PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) - offsetof( PhysicalDeviceSamplerFilterMinmaxProperties, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceSamplerFilterMinmaxPropertiesEXT& operator=( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSamplerFilterMinmaxProperties& operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>(&rhs);
return *this;
}
- operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSamplerFilterMinmaxProperties const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
}
- operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
}
- bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -51140,58 +47526,40 @@ namespace VULKAN_HPP_NAMESPACE
&& ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
}
- bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
};
- static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!" );
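This hunk is the EXT-to-core rename from the Vulkan 1.2 promotion: PhysicalDeviceSamplerFilterMinmaxPropertiesEXT becomes PhysicalDeviceSamplerFilterMinmaxProperties. Code still using the old spelling can bridge with an alias; a compat sketch (the alias name here is illustrative, not from the diff):

    #include <vulkan/vulkan.hpp>

    // hypothetical alias to keep pre-1.2 call sites compiling after the rename
    using PhysicalDeviceSamplerFilterMinmaxPropertiesEXTCompat =
        vk::PhysicalDeviceSamplerFilterMinmaxProperties;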
- namespace layout
+ struct PhysicalDeviceSamplerYcbcrConversionFeatures
{
- struct PhysicalDeviceSamplerYcbcrConversionFeatures
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( vk::Bool32 samplerYcbcrConversion_ = 0 ) VULKAN_HPP_NOEXCEPT
- : samplerYcbcrConversion( samplerYcbcrConversion_ )
- {}
-
- PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>(this) = rhs;
- }
-
- PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
- void* pNext = nullptr;
- vk::Bool32 samplerYcbcrConversion;
- };
- static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceSamplerYcbcrConversionFeatures : public layout::PhysicalDeviceSamplerYcbcrConversionFeatures
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( vk::Bool32 samplerYcbcrConversion_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSamplerYcbcrConversionFeatures( samplerYcbcrConversion_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT
+ : samplerYcbcrConversion( samplerYcbcrConversion_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) - offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSamplerYcbcrConversionFeatures( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSamplerYcbcrConversionFeatures::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>(&rhs);
return *this;
}
@@ -51201,7 +47569,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( vk::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
{
samplerYcbcrConversion = samplerYcbcrConversion_;
return *this;
@@ -51229,257 +47597,198 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSamplerYcbcrConversionFeatures::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
};
static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
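Setters return *this, so a features struct can be built in one expression and parked on an existing chain; a sketch where `next` is an assumed prior chain element (may be nullptr):

    #include <vulkan/vulkan.hpp>

    vk::PhysicalDeviceSamplerYcbcrConversionFeatures enabledYcbcrFeature( void* next )
    {
      return vk::PhysicalDeviceSamplerYcbcrConversionFeatures()
          .setPNext( next )
          .setSamplerYcbcrConversion( VK_TRUE );
    }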
- namespace layout
- {
- struct PhysicalDeviceScalarBlockLayoutFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeaturesEXT( vk::Bool32 scalarBlockLayout_ = 0 ) VULKAN_HPP_NOEXCEPT
- : scalarBlockLayout( scalarBlockLayout_ )
- {}
-
- PhysicalDeviceScalarBlockLayoutFeaturesEXT( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceScalarBlockLayoutFeaturesEXT& operator=( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 scalarBlockLayout;
- };
- static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeaturesEXT ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceScalarBlockLayoutFeaturesEXT : public layout::PhysicalDeviceScalarBlockLayoutFeaturesEXT
+ struct PhysicalDeviceScalarBlockLayoutFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeaturesEXT( vk::Bool32 scalarBlockLayout_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceScalarBlockLayoutFeaturesEXT( scalarBlockLayout_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) VULKAN_HPP_NOEXCEPT
+ : scalarBlockLayout( scalarBlockLayout_ )
{}
- PhysicalDeviceScalarBlockLayoutFeaturesEXT( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceScalarBlockLayoutFeaturesEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) - offsetof( PhysicalDeviceScalarBlockLayoutFeatures, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceScalarBlockLayoutFeaturesEXT& operator=( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceScalarBlockLayoutFeatures& operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceScalarBlockLayoutFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>(&rhs);
return *this;
}
- PhysicalDeviceScalarBlockLayoutFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceScalarBlockLayoutFeaturesEXT & setScalarBlockLayout( vk::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
{
scalarBlockLayout = scalarBlockLayout_;
return *this;
}
- operator VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceScalarBlockLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
}
- operator VkPhysicalDeviceScalarBlockLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeaturesEXT*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
}
- bool operator==( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( scalarBlockLayout == rhs.scalarBlockLayout );
}
- bool operator!=( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceScalarBlockLayoutFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
};
- static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeaturesEXT ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR( vk::Bool32 separateDepthStencilLayouts_ = 0 ) VULKAN_HPP_NOEXCEPT
- : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
- {}
-
- PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 separateDepthStencilLayouts;
- };
- static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR : public layout::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR
+ struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR( vk::Bool32 separateDepthStencilLayouts_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR( separateDepthStencilLayouts_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
+ : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
{}
- PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) - offsetof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures, pNext ) );
+ return *this;
+ }
- PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR& operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures& operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>(&rhs);
return *this;
}
- PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR & setSeparateDepthStencilLayouts( vk::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
{
separateDepthStencilLayouts = separateDepthStencilLayouts_;
return *this;
}
- operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
}
- operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
}
- bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
}
- bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
};
- static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceShaderAtomicInt64FeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64FeaturesKHR( vk::Bool32 shaderBufferInt64Atomics_ = 0,
- vk::Bool32 shaderSharedInt64Atomics_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
- , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
- {}
-
- PhysicalDeviceShaderAtomicInt64FeaturesKHR( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>(this) = rhs;
- return *this;
- }
+ static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "struct wrapper is not a standard layout!" );
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 shaderBufferInt64Atomics;
- vk::Bool32 shaderSharedInt64Atomics;
- };
- static_assert( sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderAtomicInt64FeaturesKHR : public layout::PhysicalDeviceShaderAtomicInt64FeaturesKHR
+ struct PhysicalDeviceShaderAtomicInt64Features
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64FeaturesKHR( vk::Bool32 shaderBufferInt64Atomics_ = 0,
- vk::Bool32 shaderSharedInt64Atomics_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderAtomicInt64FeaturesKHR( shaderBufferInt64Atomics_, shaderSharedInt64Atomics_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
+ , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
{}
- PhysicalDeviceShaderAtomicInt64FeaturesKHR( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderAtomicInt64FeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) - offsetof( PhysicalDeviceShaderAtomicInt64Features, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderAtomicInt64Features& operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderAtomicInt64FeaturesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>(&rhs);
return *this;
}
- PhysicalDeviceShaderAtomicInt64FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderAtomicInt64Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceShaderAtomicInt64FeaturesKHR & setShaderBufferInt64Atomics( vk::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
return *this;
}
- PhysicalDeviceShaderAtomicInt64FeaturesKHR & setShaderSharedInt64Atomics( vk::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
return *this;
}
- operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderAtomicInt64Features const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
}
- operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64FeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
}
- bool operator==( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -51487,62 +47796,42 @@ namespace VULKAN_HPP_NAMESPACE
&& ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
}
- bool operator!=( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderAtomicInt64FeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
};
- static_assert( sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceShaderClockFeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( vk::Bool32 shaderSubgroupClock_ = 0,
- vk::Bool32 shaderDeviceClock_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderSubgroupClock( shaderSubgroupClock_ )
- , shaderDeviceClock( shaderDeviceClock_ )
- {}
-
- PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceShaderClockFeaturesKHR& operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 shaderSubgroupClock;
- vk::Bool32 shaderDeviceClock;
- };
- static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64Features>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceShaderClockFeaturesKHR : public layout::PhysicalDeviceShaderClockFeaturesKHR
+ struct PhysicalDeviceShaderClockFeaturesKHR
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( vk::Bool32 shaderSubgroupClock_ = 0,
- vk::Bool32 shaderDeviceClock_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderClockFeaturesKHR( shaderSubgroupClock_, shaderDeviceClock_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderSubgroupClock( shaderSubgroupClock_ )
+ , shaderDeviceClock( shaderDeviceClock_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) - offsetof( PhysicalDeviceShaderClockFeaturesKHR, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderClockFeaturesKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderClockFeaturesKHR& operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderClockFeaturesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>(&rhs);
return *this;
}
@@ -51552,13 +47841,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( vk::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupClock = shaderSubgroupClock_;
return *this;
}
- PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( vk::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
{
shaderDeviceClock = shaderDeviceClock_;
return *this;
@@ -51587,53 +47876,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderClockFeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
};
static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceShaderCoreProperties2AMD
{
- struct PhysicalDeviceShaderCoreProperties2AMD
- {
- protected:
- PhysicalDeviceShaderCoreProperties2AMD() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>(this) = rhs;
- }
-
- PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
- void* pNext = nullptr;
- vk::ShaderCorePropertiesFlagsAMD shaderCoreFeatures;
- uint32_t activeComputeUnitCount;
- };
- static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderCoreProperties2AMD : public layout::PhysicalDeviceShaderCoreProperties2AMD
- {
- PhysicalDeviceShaderCoreProperties2AMD() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderCoreProperties2AMD()
+ PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {},
+ uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderCoreFeatures( shaderCoreFeatures_ )
+ , activeComputeUnitCount( activeComputeUnitCount_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) - offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderCoreProperties2AMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderCoreProperties2AMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>(&rhs);
return *this;
}
@@ -51660,65 +47933,61 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderCoreProperties2AMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
+ uint32_t activeComputeUnitCount = {};
};
static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceShaderCorePropertiesAMD
- {
- protected:
- PhysicalDeviceShaderCorePropertiesAMD() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>(this) = rhs;
- }
-
- PhysicalDeviceShaderCorePropertiesAMD& operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
- void* pNext = nullptr;
- uint32_t shaderEngineCount;
- uint32_t shaderArraysPerEngineCount;
- uint32_t computeUnitsPerShaderArray;
- uint32_t simdPerComputeUnit;
- uint32_t wavefrontsPerSimd;
- uint32_t wavefrontSize;
- uint32_t sgprsPerSimd;
- uint32_t minSgprAllocation;
- uint32_t maxSgprAllocation;
- uint32_t sgprAllocationGranularity;
- uint32_t vgprsPerSimd;
- uint32_t minVgprAllocation;
- uint32_t maxVgprAllocation;
- uint32_t vgprAllocationGranularity;
- };
- static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderCorePropertiesAMD : public layout::PhysicalDeviceShaderCorePropertiesAMD
- {
- PhysicalDeviceShaderCorePropertiesAMD() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderCorePropertiesAMD()
- {}
+ struct PhysicalDeviceShaderCorePropertiesAMD
+ {
+ PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {},
+ uint32_t shaderArraysPerEngineCount_ = {},
+ uint32_t computeUnitsPerShaderArray_ = {},
+ uint32_t simdPerComputeUnit_ = {},
+ uint32_t wavefrontsPerSimd_ = {},
+ uint32_t wavefrontSize_ = {},
+ uint32_t sgprsPerSimd_ = {},
+ uint32_t minSgprAllocation_ = {},
+ uint32_t maxSgprAllocation_ = {},
+ uint32_t sgprAllocationGranularity_ = {},
+ uint32_t vgprsPerSimd_ = {},
+ uint32_t minVgprAllocation_ = {},
+ uint32_t maxVgprAllocation_ = {},
+ uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderEngineCount( shaderEngineCount_ )
+ , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ )
+ , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ )
+ , simdPerComputeUnit( simdPerComputeUnit_ )
+ , wavefrontsPerSimd( wavefrontsPerSimd_ )
+ , wavefrontSize( wavefrontSize_ )
+ , sgprsPerSimd( sgprsPerSimd_ )
+ , minSgprAllocation( minSgprAllocation_ )
+ , maxSgprAllocation( maxSgprAllocation_ )
+ , sgprAllocationGranularity( sgprAllocationGranularity_ )
+ , vgprsPerSimd( vgprsPerSimd_ )
+ , minVgprAllocation( minVgprAllocation_ )
+ , maxVgprAllocation( maxVgprAllocation_ )
+ , vgprAllocationGranularity( vgprAllocationGranularity_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) - offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) );
+ return *this;
+ }
PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderCorePropertiesAMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderCorePropertiesAMD& operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderCorePropertiesAMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>(&rhs);
return *this;
}
@@ -51757,53 +48026,47 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderCorePropertiesAMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
+ void* pNext = {};
+ uint32_t shaderEngineCount = {};
+ uint32_t shaderArraysPerEngineCount = {};
+ uint32_t computeUnitsPerShaderArray = {};
+ uint32_t simdPerComputeUnit = {};
+ uint32_t wavefrontsPerSimd = {};
+ uint32_t wavefrontSize = {};
+ uint32_t sgprsPerSimd = {};
+ uint32_t minSgprAllocation = {};
+ uint32_t maxSgprAllocation = {};
+ uint32_t sgprAllocationGranularity = {};
+ uint32_t vgprsPerSimd = {};
+ uint32_t minVgprAllocation = {};
+ uint32_t maxVgprAllocation = {};
+ uint32_t vgprAllocationGranularity = {};
};
static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
{
- struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( vk::Bool32 shaderDemoteToHelperInvocation_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
- {}
-
- PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 shaderDemoteToHelperInvocation;
- };
- static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : public layout::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( vk::Bool32 shaderDemoteToHelperInvocation_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( shaderDemoteToHelperInvocation_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>(&rhs);
return *this;
}
@@ -51813,7 +48076,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( vk::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
return *this;
@@ -51841,53 +48104,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
};
static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceShaderDrawParametersFeatures
{
- struct PhysicalDeviceShaderDrawParametersFeatures
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( vk::Bool32 shaderDrawParameters_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderDrawParameters( shaderDrawParameters_ )
- {}
-
- PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>(this) = rhs;
- }
-
- PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
- void* pNext = nullptr;
- vk::Bool32 shaderDrawParameters;
- };
- static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderDrawParametersFeatures : public layout::PhysicalDeviceShaderDrawParametersFeatures
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( vk::Bool32 shaderDrawParameters_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderDrawParametersFeatures( shaderDrawParameters_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderDrawParameters( shaderDrawParameters_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) - offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderDrawParametersFeatures( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderDrawParametersFeatures::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>(&rhs);
return *this;
}
@@ -51897,7 +48141,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( vk::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
{
shaderDrawParameters = shaderDrawParameters_;
return *this;
@@ -51925,89 +48169,68 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderDrawParametersFeatures::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
};
static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceShaderFloat16Int8FeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8FeaturesKHR( vk::Bool32 shaderFloat16_ = 0,
- vk::Bool32 shaderInt8_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderFloat16( shaderFloat16_ )
- , shaderInt8( shaderInt8_ )
- {}
-
- PhysicalDeviceShaderFloat16Int8FeaturesKHR( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceShaderFloat16Int8FeaturesKHR& operator=( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8FeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 shaderFloat16;
- vk::Bool32 shaderInt8;
- };
- static_assert( sizeof( PhysicalDeviceShaderFloat16Int8FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderFloat16Int8FeaturesKHR : public layout::PhysicalDeviceShaderFloat16Int8FeaturesKHR
+ struct PhysicalDeviceShaderFloat16Int8Features
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8FeaturesKHR( vk::Bool32 shaderFloat16_ = 0,
- vk::Bool32 shaderInt8_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderFloat16Int8FeaturesKHR( shaderFloat16_, shaderInt8_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderFloat16( shaderFloat16_ )
+ , shaderInt8( shaderInt8_ )
{}
- PhysicalDeviceShaderFloat16Int8FeaturesKHR( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderFloat16Int8FeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) - offsetof( PhysicalDeviceShaderFloat16Int8Features, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceShaderFloat16Int8FeaturesKHR& operator=( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderFloat16Int8Features& operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderFloat16Int8FeaturesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>(&rhs);
return *this;
}
- PhysicalDeviceShaderFloat16Int8FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderFloat16Int8Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceShaderFloat16Int8FeaturesKHR & setShaderFloat16( vk::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat16 = shaderFloat16_;
return *this;
}
- PhysicalDeviceShaderFloat16Int8FeaturesKHR & setShaderInt8( vk::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt8 = shaderInt8_;
return *this;
}
- operator VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderFloat16Int8Features const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
}
- operator VkPhysicalDeviceShaderFloat16Int8FeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8FeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
}
- bool operator==( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -52015,58 +48238,40 @@ namespace VULKAN_HPP_NAMESPACE
&& ( shaderInt8 == rhs.shaderInt8 );
}
- bool operator!=( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderFloat16Int8FeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
};
- static_assert( sizeof( PhysicalDeviceShaderFloat16Int8FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceShaderImageFootprintFeaturesNV
{
- struct PhysicalDeviceShaderImageFootprintFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( vk::Bool32 imageFootprint_ = 0 ) VULKAN_HPP_NOEXCEPT
- : imageFootprint( imageFootprint_ )
- {}
-
- PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 imageFootprint;
- };
- static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderImageFootprintFeaturesNV : public layout::PhysicalDeviceShaderImageFootprintFeaturesNV
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( vk::Bool32 imageFootprint_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderImageFootprintFeaturesNV( imageFootprint_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT
+ : imageFootprint( imageFootprint_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) - offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderImageFootprintFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderImageFootprintFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>(&rhs);
return *this;
}
@@ -52076,7 +48281,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( vk::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
{
imageFootprint = imageFootprint_;
return *this;
@@ -52104,53 +48309,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderImageFootprintFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
};
static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( vk::Bool32 shaderIntegerFunctions2_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
- {}
-
- PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>(this) = rhs;
- }
-
- PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
- void* pNext = nullptr;
- vk::Bool32 shaderIntegerFunctions2;
- };
- static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : public layout::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
+ struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( vk::Bool32 shaderIntegerFunctions2_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( shaderIntegerFunctions2_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>(&rhs);
return *this;
}
@@ -52160,7 +48346,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( vk::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
{
shaderIntegerFunctions2 = shaderIntegerFunctions2_;
return *this;
@@ -52188,53 +48374,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
};
static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( vk::Bool32 shaderSMBuiltins_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderSMBuiltins( shaderSMBuiltins_ )
- {}
-
- PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 shaderSMBuiltins;
- };
- static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderSMBuiltinsFeaturesNV : public layout::PhysicalDeviceShaderSMBuiltinsFeaturesNV
+ struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( vk::Bool32 shaderSMBuiltins_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderSMBuiltinsFeaturesNV( shaderSMBuiltins_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderSMBuiltins( shaderSMBuiltins_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderSMBuiltinsFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderSMBuiltinsFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>(&rhs);
return *this;
}
@@ -52244,7 +48411,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( vk::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
{
shaderSMBuiltins = shaderSMBuiltins_;
return *this;
@@ -52272,53 +48439,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderSMBuiltinsFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
};
static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
- {
- protected:
- PhysicalDeviceShaderSMBuiltinsPropertiesNV() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>(this) = rhs;
- }
-
- PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
- void* pNext = nullptr;
- uint32_t shaderSMCount;
- uint32_t shaderWarpsPerSM;
- };
- static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderSMBuiltinsPropertiesNV : public layout::PhysicalDeviceShaderSMBuiltinsPropertiesNV
+ struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
{
- PhysicalDeviceShaderSMBuiltinsPropertiesNV() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderSMBuiltinsPropertiesNV()
+ PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {},
+ uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderSMCount( shaderSMCount_ )
+ , shaderWarpsPerSM( shaderWarpsPerSM_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderSMBuiltinsPropertiesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderSMBuiltinsPropertiesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>(&rhs);
return *this;
}
@@ -52345,141 +48495,102 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderSMBuiltinsPropertiesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
+ void* pNext = {};
+ uint32_t shaderSMCount = {};
+ uint32_t shaderWarpsPerSM = {};
};
static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR( vk::Bool32 shaderSubgroupExtendedTypes_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
- {}
-
- PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 shaderSubgroupExtendedTypes;
- };
- static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR : public layout::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR
+ struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR( vk::Bool32 shaderSubgroupExtendedTypes_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR( shaderSubgroupExtendedTypes_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
{}
- PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) - offsetof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures, pNext ) );
+ return *this;
+ }
- PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR& operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures& operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>(&rhs);
return *this;
}
- PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR & setShaderSubgroupExtendedTypes( vk::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
return *this;
}
- operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
- operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
- bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
}
- bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
};
- static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "struct wrapper is not a standard layout!" );
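  // Hedged usage sketch: the struct above lost its KHR suffix when the feature was
  // promoted to core in Vulkan 1.2, but it slots into a getFeatures2 structure
  // chain like any other feature struct. Assumes vulkan.hpp is on the include path
  // and physicalDevice is a valid handle.
  #include <vulkan/vulkan.hpp>

  bool hasSubgroupExtendedTypes( vk::PhysicalDevice physicalDevice )
  {
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>();
    return chain.get<vk::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>().shaderSubgroupExtendedTypes == VK_TRUE;
  }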
- namespace layout
+ struct PhysicalDeviceShadingRateImageFeaturesNV
{
- struct PhysicalDeviceShadingRateImageFeaturesNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( vk::Bool32 shadingRateImage_ = 0,
- vk::Bool32 shadingRateCoarseSampleOrder_ = 0 ) VULKAN_HPP_NOEXCEPT
- : shadingRateImage( shadingRateImage_ )
- , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
- {}
-
- PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>(this) = rhs;
- }
-
- PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
- void* pNext = nullptr;
- vk::Bool32 shadingRateImage;
- vk::Bool32 shadingRateCoarseSampleOrder;
- };
- static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShadingRateImageFeaturesNV : public layout::PhysicalDeviceShadingRateImageFeaturesNV
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( vk::Bool32 shadingRateImage_ = 0,
- vk::Bool32 shadingRateCoarseSampleOrder_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShadingRateImageFeaturesNV( shadingRateImage_, shadingRateCoarseSampleOrder_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shadingRateImage( shadingRateImage_ )
+ , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) - offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShadingRateImageFeaturesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShadingRateImageFeaturesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>(&rhs);
return *this;
}
@@ -52489,13 +48600,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( vk::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateImage = shadingRateImage_;
return *this;
}
- PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( vk::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
return *this;
@@ -52524,54 +48635,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShadingRateImageFeaturesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
};
static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceShadingRateImagePropertiesNV
{
- struct PhysicalDeviceShadingRateImagePropertiesNV
- {
- protected:
- PhysicalDeviceShadingRateImagePropertiesNV() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>(this) = rhs;
- }
-
- PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
- void* pNext = nullptr;
- vk::Extent2D shadingRateTexelSize;
- uint32_t shadingRatePaletteSize;
- uint32_t shadingRateMaxCoarseSamples;
- };
- static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceShadingRateImagePropertiesNV : public layout::PhysicalDeviceShadingRateImagePropertiesNV
- {
- PhysicalDeviceShadingRateImagePropertiesNV() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShadingRateImagePropertiesNV()
+ PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {},
+ uint32_t shadingRatePaletteSize_ = {},
+ uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shadingRateTexelSize( shadingRateTexelSize_ )
+ , shadingRatePaletteSize( shadingRatePaletteSize_ )
+ , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) - offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceShadingRateImagePropertiesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceShadingRateImagePropertiesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>(&rhs);
return *this;
}
@@ -52599,69 +48695,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceShadingRateImagePropertiesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
+ uint32_t shadingRatePaletteSize = {};
+ uint32_t shadingRateMaxCoarseSamples = {};
};
static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );
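  // Usage sketch, assuming a device exposing VK_NV_shading_rate_image: the
  // read-only properties wrapper above is filled in through a getProperties2 chain.
  #include <vulkan/vulkan.hpp>

  vk::Extent2D shadingRateTexelSize( vk::PhysicalDevice physicalDevice )
  {
    auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                               vk::PhysicalDeviceShadingRateImagePropertiesNV>();
    return chain.get<vk::PhysicalDeviceShadingRateImagePropertiesNV>().shadingRateTexelSize;
  }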
- namespace layout
+ struct PhysicalDeviceSparseImageFormatInfo2
{
- struct PhysicalDeviceSparseImageFormatInfo2
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( vk::Format format_ = vk::Format::eUndefined,
- vk::ImageType type_ = vk::ImageType::e1D,
- vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1,
- vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(),
- vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- , type( type_ )
- , samples( samples_ )
- , usage( usage_ )
- , tiling( tiling_ )
- {}
-
- PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>(this) = rhs;
- }
-
- PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
- const void* pNext = nullptr;
- vk::Format format;
- vk::ImageType type;
- vk::SampleCountFlagBits samples;
- vk::ImageUsageFlags usage;
- vk::ImageTiling tiling;
- };
- static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceSparseImageFormatInfo2 : public layout::PhysicalDeviceSparseImageFormatInfo2
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( vk::Format format_ = vk::Format::eUndefined,
- vk::ImageType type_ = vk::ImageType::e1D,
- vk::SampleCountFlagBits samples_ = vk::SampleCountFlagBits::e1,
- vk::ImageUsageFlags usage_ = vk::ImageUsageFlags(),
- vk::ImageTiling tiling_ = vk::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSparseImageFormatInfo2( format_, type_, samples_, usage_, tiling_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT
+ : format( format_ )
+ , type( type_ )
+ , samples( samples_ )
+ , usage( usage_ )
+ , tiling( tiling_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) - offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSparseImageFormatInfo2( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSparseImageFormatInfo2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>(&rhs);
return *this;
}
@@ -52671,31 +48742,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceSparseImageFormatInfo2 & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- PhysicalDeviceSparseImageFormatInfo2 & setType( vk::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
}
- PhysicalDeviceSparseImageFormatInfo2 & setSamples( vk::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
{
samples = samples_;
return *this;
}
- PhysicalDeviceSparseImageFormatInfo2 & setUsage( vk::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
- PhysicalDeviceSparseImageFormatInfo2 & setTiling( vk::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
{
tiling = tiling_;
return *this;
@@ -52717,7 +48788,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pNext == rhs.pNext )
&& ( format == rhs.format )
&& ( type == rhs.type )
- && vk::operator==( samples, rhs.samples )
+ && ( samples == rhs.samples )
&& ( usage == rhs.usage )
&& ( tiling == rhs.tiling );
}
@@ -52727,55 +48798,44 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSparseImageFormatInfo2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+ VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+ VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
};
static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
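  // Usage sketch: the constructor defaults above (eUndefined format, e1D type, one
  // sample, optimal tiling) mean only the members of interest need spelling out;
  // the query itself is a plain enhanced-mode call.
  #include <vulkan/vulkan.hpp>
  #include <vector>

  std::vector<vk::SparseImageFormatProperties2> sparseFormatProperties( vk::PhysicalDevice physicalDevice )
  {
    vk::PhysicalDeviceSparseImageFormatInfo2 info( vk::Format::eR8G8B8A8Unorm,
                                                   vk::ImageType::e2D,
                                                   vk::SampleCountFlagBits::e1,
                                                   vk::ImageUsageFlagBits::eSampled,
                                                   vk::ImageTiling::eOptimal );
    return physicalDevice.getSparseImageFormatProperties2( info );
  }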
- namespace layout
- {
- struct PhysicalDeviceSubgroupProperties
- {
- protected:
- PhysicalDeviceSubgroupProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>(this) = rhs;
- }
-
- PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
- void* pNext = nullptr;
- uint32_t subgroupSize;
- vk::ShaderStageFlags supportedStages;
- vk::SubgroupFeatureFlags supportedOperations;
- vk::Bool32 quadOperationsInAllStages;
- };
- static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceSubgroupProperties : public layout::PhysicalDeviceSubgroupProperties
+ struct PhysicalDeviceSubgroupProperties
{
- PhysicalDeviceSubgroupProperties() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSubgroupProperties()
+ PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {},
+ VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT
+ : subgroupSize( subgroupSize_ )
+ , supportedStages( supportedStages_ )
+ , supportedOperations( supportedOperations_ )
+ , quadOperationsInAllStages( quadOperationsInAllStages_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSubgroupProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSubgroupProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>(&rhs);
return *this;
}
@@ -52804,57 +48864,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSubgroupProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
+ void* pNext = {};
+ uint32_t subgroupSize = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
+ VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
+ VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
};
static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
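  // Sketch: the core Vulkan 1.1 subgroup limits above combine a size with flag
  // bits, so a capability check is a chained query plus a flag test.
  #include <vulkan/vulkan.hpp>

  bool supportsSubgroupArithmetic( vk::PhysicalDevice physicalDevice )
  {
    auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                               vk::PhysicalDeviceSubgroupProperties>();
    auto const & subgroup = chain.get<vk::PhysicalDeviceSubgroupProperties>();
    return static_cast<bool>( subgroup.supportedOperations & vk::SubgroupFeatureFlagBits::eArithmetic );
  }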
- namespace layout
- {
- struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( vk::Bool32 subgroupSizeControl_ = 0,
- vk::Bool32 computeFullSubgroups_ = 0 ) VULKAN_HPP_NOEXCEPT
- : subgroupSizeControl( subgroupSizeControl_ )
- , computeFullSubgroups( computeFullSubgroups_ )
- {}
-
- PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 subgroupSizeControl;
- vk::Bool32 computeFullSubgroups;
- };
- static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceSubgroupSizeControlFeaturesEXT : public layout::PhysicalDeviceSubgroupSizeControlFeaturesEXT
+ struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( vk::Bool32 subgroupSizeControl_ = 0,
- vk::Bool32 computeFullSubgroups_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSubgroupSizeControlFeaturesEXT( subgroupSizeControl_, computeFullSubgroups_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT
+ : subgroupSizeControl( subgroupSizeControl_ )
+ , computeFullSubgroups( computeFullSubgroups_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSubgroupSizeControlFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSubgroupSizeControlFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>(&rhs);
return *this;
}
@@ -52864,13 +48906,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( vk::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
{
subgroupSizeControl = subgroupSizeControl_;
return *this;
}
- PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( vk::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
{
computeFullSubgroups = computeFullSubgroups_;
return *this;
@@ -52899,55 +48941,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSubgroupSizeControlFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
+ VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
};
static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
{
- struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
- {
- protected:
- PhysicalDeviceSubgroupSizeControlPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
- void* pNext = nullptr;
- uint32_t minSubgroupSize;
- uint32_t maxSubgroupSize;
- uint32_t maxComputeWorkgroupSubgroups;
- vk::ShaderStageFlags requiredSubgroupSizeStages;
- };
- static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceSubgroupSizeControlPropertiesEXT : public layout::PhysicalDeviceSubgroupSizeControlPropertiesEXT
- {
- PhysicalDeviceSubgroupSizeControlPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSubgroupSizeControlPropertiesEXT()
+ PhysicalDeviceSubgroupSizeControlPropertiesEXT( uint32_t minSubgroupSize_ = {},
+ uint32_t maxSubgroupSize_ = {},
+ uint32_t maxComputeWorkgroupSubgroups_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT
+ : minSubgroupSize( minSubgroupSize_ )
+ , maxSubgroupSize( maxSubgroupSize_ )
+ , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
+ , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSubgroupSizeControlPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSubgroupSizeControlPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>(&rhs);
return *this;
}
@@ -52976,53 +49004,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSubgroupSizeControlPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
+ void* pNext = {};
+ uint32_t minSubgroupSize = {};
+ uint32_t maxSubgroupSize = {};
+ uint32_t maxComputeWorkgroupSubgroups = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
};
static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceSurfaceInfo2KHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( vk::SurfaceKHR surface_ = vk::SurfaceKHR() ) VULKAN_HPP_NOEXCEPT
- : surface( surface_ )
- {}
-
- PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>(this) = rhs;
- }
-
- PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
- const void* pNext = nullptr;
- vk::SurfaceKHR surface;
- };
- static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceSurfaceInfo2KHR : public layout::PhysicalDeviceSurfaceInfo2KHR
+ struct PhysicalDeviceSurfaceInfo2KHR
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( vk::SurfaceKHR surface_ = vk::SurfaceKHR() ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSurfaceInfo2KHR( surface_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT
+ : surface( surface_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceSurfaceInfo2KHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceSurfaceInfo2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>(&rhs);
return *this;
}
@@ -53032,7 +49044,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceSurfaceInfo2KHR & setSurface( vk::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
{
surface = surface_;
return *this;
@@ -53060,53 +49072,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceSurfaceInfo2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
};
static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
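  // Sketch, assuming VK_KHR_get_surface_capabilities2 is enabled: the info struct
  // above simply wraps the surface handle for the extended capability query.
  #include <vulkan/vulkan.hpp>

  vk::SurfaceCapabilities2KHR surfaceCapabilities( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
  {
    vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
    return physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
  }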
- namespace layout
- {
- struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( vk::Bool32 texelBufferAlignment_ = 0 ) VULKAN_HPP_NOEXCEPT
- : texelBufferAlignment( texelBufferAlignment_ )
- {}
-
- PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 texelBufferAlignment;
- };
- static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT : public layout::PhysicalDeviceTexelBufferAlignmentFeaturesEXT
+ struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( vk::Bool32 texelBufferAlignment_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTexelBufferAlignmentFeaturesEXT( texelBufferAlignment_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+ : texelBufferAlignment( texelBufferAlignment_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTexelBufferAlignmentFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceTexelBufferAlignmentFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>(&rhs);
return *this;
}
@@ -53116,7 +49109,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( vk::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
{
texelBufferAlignment = texelBufferAlignment_;
return *this;
@@ -53144,55 +49137,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceTexelBufferAlignmentFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
};
static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
{
- struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
- {
- protected:
- PhysicalDeviceTexelBufferAlignmentPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
- void* pNext = nullptr;
- vk::DeviceSize storageTexelBufferOffsetAlignmentBytes;
- vk::Bool32 storageTexelBufferOffsetSingleTexelAlignment;
- vk::DeviceSize uniformTexelBufferOffsetAlignmentBytes;
- vk::Bool32 uniformTexelBufferOffsetSingleTexelAlignment;
- };
- static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT : public layout::PhysicalDeviceTexelBufferAlignmentPropertiesEXT
- {
- PhysicalDeviceTexelBufferAlignmentPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTexelBufferAlignmentPropertiesEXT()
+ PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+ : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
+ , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
+ , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
+ , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTexelBufferAlignmentPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceTexelBufferAlignmentPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>(&rhs);
return *this;
}
@@ -53221,63 +49199,47 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceTexelBufferAlignmentPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
};
static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
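  // Sketch: the alignment limits above are required by the spec to be powers of
  // two, so a buffer offset can be rounded up with the usual bit-mask idiom.
  // alignTexelBufferOffset is a hypothetical helper, not part of vulkan.hpp.
  #include <vulkan/vulkan.hpp>

  vk::DeviceSize alignTexelBufferOffset( vk::DeviceSize offset, vk::DeviceSize alignment )
  {
    return ( offset + alignment - 1 ) & ~( alignment - 1 );   // round up to the next multiple
  }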
- namespace layout
- {
- struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( vk::Bool32 textureCompressionASTC_HDR_ = 0 ) VULKAN_HPP_NOEXCEPT
- : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
- {}
-
- PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
- const void* pNext = nullptr;
- vk::Bool32 textureCompressionASTC_HDR;
- };
- static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT : public layout::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
+ struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( vk::Bool32 textureCompressionASTC_HDR_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( textureCompressionASTC_HDR_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT
+ : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>(&rhs);
return *this;
}
- PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( vk::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
return *this;
@@ -53305,212 +49267,228 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
};
static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceTimelineSemaphoreFeatures
{
- struct PhysicalDeviceTimelineSemaphoreFeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeaturesKHR( vk::Bool32 timelineSemaphore_ = 0 ) VULKAN_HPP_NOEXCEPT
- : timelineSemaphore( timelineSemaphore_ )
- {}
-
- PhysicalDeviceTimelineSemaphoreFeaturesKHR( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceTimelineSemaphoreFeaturesKHR& operator=( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 timelineSemaphore;
- };
- static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeaturesKHR ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceTimelineSemaphoreFeaturesKHR : public layout::PhysicalDeviceTimelineSemaphoreFeaturesKHR
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeaturesKHR( vk::Bool32 timelineSemaphore_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTimelineSemaphoreFeaturesKHR( timelineSemaphore_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} ) VULKAN_HPP_NOEXCEPT
+ : timelineSemaphore( timelineSemaphore_ )
{}
- PhysicalDeviceTimelineSemaphoreFeaturesKHR( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTimelineSemaphoreFeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) - offsetof( PhysicalDeviceTimelineSemaphoreFeatures, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceTimelineSemaphoreFeaturesKHR& operator=( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTimelineSemaphoreFeatures& operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceTimelineSemaphoreFeaturesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>(&rhs);
return *this;
}
- PhysicalDeviceTimelineSemaphoreFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceTimelineSemaphoreFeaturesKHR & setTimelineSemaphore( vk::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
{
timelineSemaphore = timelineSemaphore_;
return *this;
}
- operator VkPhysicalDeviceTimelineSemaphoreFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTimelineSemaphoreFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
- operator VkPhysicalDeviceTimelineSemaphoreFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
- bool operator==( PhysicalDeviceTimelineSemaphoreFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( timelineSemaphore == rhs.timelineSemaphore );
}
- bool operator!=( PhysicalDeviceTimelineSemaphoreFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceTimelineSemaphoreFeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
};
- static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeaturesKHR ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceTimelineSemaphorePropertiesKHR
- {
- protected:
- PhysicalDeviceTimelineSemaphorePropertiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceTimelineSemaphorePropertiesKHR( VkPhysicalDeviceTimelineSemaphorePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTimelineSemaphorePropertiesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceTimelineSemaphorePropertiesKHR& operator=( VkPhysicalDeviceTimelineSemaphorePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTimelineSemaphorePropertiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphorePropertiesKHR;
- void* pNext = nullptr;
- uint64_t maxTimelineSemaphoreValueDifference;
- };
- static_assert( sizeof( PhysicalDeviceTimelineSemaphorePropertiesKHR ) == sizeof( VkPhysicalDeviceTimelineSemaphorePropertiesKHR ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeatures>::value, "struct wrapper is not a standard layout!" );
- struct PhysicalDeviceTimelineSemaphorePropertiesKHR : public layout::PhysicalDeviceTimelineSemaphorePropertiesKHR
+ struct PhysicalDeviceTimelineSemaphoreProperties
{
- PhysicalDeviceTimelineSemaphorePropertiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTimelineSemaphorePropertiesKHR()
+ PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
{}
- PhysicalDeviceTimelineSemaphorePropertiesKHR( VkPhysicalDeviceTimelineSemaphorePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTimelineSemaphorePropertiesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) - offsetof( PhysicalDeviceTimelineSemaphoreProperties, pNext ) );
+ return *this;
+ }
- PhysicalDeviceTimelineSemaphorePropertiesKHR& operator=( VkPhysicalDeviceTimelineSemaphorePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceTimelineSemaphorePropertiesKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ PhysicalDeviceTimelineSemaphoreProperties& operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>(&rhs);
return *this;
}
- operator VkPhysicalDeviceTimelineSemaphorePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTimelineSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphorePropertiesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
}
- operator VkPhysicalDeviceTimelineSemaphorePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphorePropertiesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
}
- bool operator==( PhysicalDeviceTimelineSemaphorePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
}
- bool operator!=( PhysicalDeviceTimelineSemaphorePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceTimelineSemaphorePropertiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
+ void* pNext = {};
+ uint64_t maxTimelineSemaphoreValueDifference = {};
};
- static_assert( sizeof( PhysicalDeviceTimelineSemaphorePropertiesKHR ) == sizeof( VkPhysicalDeviceTimelineSemaphorePropertiesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphorePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
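  // Sketch: like the feature struct above it, this properties struct dropped its
  // KHR suffix on promotion to core in Vulkan 1.2; reading the limit back is one
  // chained query.
  #include <vulkan/vulkan.hpp>
  #include <cstdint>

  uint64_t maxTimelineDelta( vk::PhysicalDevice physicalDevice )
  {
    auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                               vk::PhysicalDeviceTimelineSemaphoreProperties>();
    return chain.get<vk::PhysicalDeviceTimelineSemaphoreProperties>().maxTimelineSemaphoreValueDifference;
  }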
- namespace layout
+ struct PhysicalDeviceToolPropertiesEXT
{
- struct PhysicalDeviceTransformFeedbackFeaturesEXT
+ PhysicalDeviceToolPropertiesEXT( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& name_ = {},
+ std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& version_ = {},
+ VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
+ std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layer_ = {} ) VULKAN_HPP_NOEXCEPT
+ : name{}
+ , version{}
+ , purposes( purposes_ )
+ , description{}
+ , layer{}
{
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( vk::Bool32 transformFeedback_ = 0,
- vk::Bool32 geometryStreams_ = 0 ) VULKAN_HPP_NOEXCEPT
- : transformFeedback( transformFeedback_ )
- , geometryStreams( geometryStreams_ )
- {}
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( name, name_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( version, version_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE,VK_MAX_EXTENSION_NAME_SIZE>::copy( layer, layer_ );
+ }
- PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>(this) = rhs;
- }
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT ) - offsetof( PhysicalDeviceToolPropertiesEXT, pNext ) );
+ return *this;
+ }
- PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>(this) = rhs;
- return *this;
- }
+ PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 transformFeedback;
- vk::Bool32 geometryStreams;
- };
- static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
+ PhysicalDeviceToolPropertiesEXT& operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>(&rhs);
+ return *this;
+ }
+
+ operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT*>( this );
+ }
+
+ operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( this );
+ }
+
+ bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memcmp( name, rhs.name, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+ && ( memcmp( version, rhs.version, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
+ && ( purposes == rhs.purposes )
+ && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+ && ( memcmp( layer, rhs.layer, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 );
+ }
+
+ bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
- struct PhysicalDeviceTransformFeedbackFeaturesEXT : public layout::PhysicalDeviceTransformFeedbackFeaturesEXT
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
+ void* pNext = {};
+ char name[VK_MAX_EXTENSION_NAME_SIZE] = {};
+ char version[VK_MAX_EXTENSION_NAME_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {};
+ char description[VK_MAX_DESCRIPTION_SIZE] = {};
+ char layer[VK_MAX_EXTENSION_NAME_SIZE] = {};
+ };
+ static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
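
PhysicalDeviceToolPropertiesEXT is a returned-only struct from VK_EXT_tooling_info: its fixed-size char arrays are filled via the copy helper in the constructor and compared with memcmp in operator==. A hedged query sketch against the C entry point (`physicalDevice` is an assumed VkPhysicalDevice handle; error handling omitted):

    // The static_asserts above are what make the pointer cast between the
    // wrapper and the C struct legal.
    uint32_t toolCount = 0;
    vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, &toolCount, nullptr );
    std::vector<vk::PhysicalDeviceToolPropertiesEXT> tools( toolCount );
    vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, &toolCount,
      reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( tools.data() ) );
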
+
+ struct PhysicalDeviceTransformFeedbackFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( vk::Bool32 transformFeedback_ = 0,
- vk::Bool32 geometryStreams_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTransformFeedbackFeaturesEXT( transformFeedback_, geometryStreams_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT
+ : transformFeedback( transformFeedback_ )
+ , geometryStreams( geometryStreams_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) - offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTransformFeedbackFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceTransformFeedbackFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>(&rhs);
return *this;
}
@@ -53520,13 +49498,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( vk::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
{
transformFeedback = transformFeedback_;
return *this;
}
- PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( vk::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
{
geometryStreams = geometryStreams_;
return *this;
@@ -53555,61 +49533,53 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceTransformFeedbackFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
+ VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
};
static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
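
Feature structs like this one are inputs: they get chained into VkDeviceCreateInfo::pNext to request the feature at device creation. A minimal sketch, assuming VK_EXT_transform_feedback is enabled and the feature was reported as supported:

    vk::PhysicalDeviceTransformFeedbackFeaturesEXT xfbFeatures;
    xfbFeatures.setTransformFeedback( VK_TRUE )   // setters return *this, so they chain
               .setGeometryStreams( VK_TRUE );
    vk::DeviceCreateInfo deviceInfo;
    deviceInfo.setPNext( &xfbFeatures );          // remaining fields omitted
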
- namespace layout
- {
- struct PhysicalDeviceTransformFeedbackPropertiesEXT
- {
- protected:
- PhysicalDeviceTransformFeedbackPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
- void* pNext = nullptr;
- uint32_t maxTransformFeedbackStreams;
- uint32_t maxTransformFeedbackBuffers;
- vk::DeviceSize maxTransformFeedbackBufferSize;
- uint32_t maxTransformFeedbackStreamDataSize;
- uint32_t maxTransformFeedbackBufferDataSize;
- uint32_t maxTransformFeedbackBufferDataStride;
- vk::Bool32 transformFeedbackQueries;
- vk::Bool32 transformFeedbackStreamsLinesTriangles;
- vk::Bool32 transformFeedbackRasterizationStreamSelect;
- vk::Bool32 transformFeedbackDraw;
- };
- static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceTransformFeedbackPropertiesEXT : public layout::PhysicalDeviceTransformFeedbackPropertiesEXT
+ struct PhysicalDeviceTransformFeedbackPropertiesEXT
{
- PhysicalDeviceTransformFeedbackPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTransformFeedbackPropertiesEXT()
+ PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {},
+ uint32_t maxTransformFeedbackBuffers_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {},
+ uint32_t maxTransformFeedbackStreamDataSize_ = {},
+ uint32_t maxTransformFeedbackBufferDataSize_ = {},
+ uint32_t maxTransformFeedbackBufferDataStride_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ )
+ , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ )
+ , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ )
+ , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ )
+ , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ )
+ , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ )
+ , transformFeedbackQueries( transformFeedbackQueries_ )
+ , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ )
+ , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ )
+ , transformFeedbackDraw( transformFeedbackDraw_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) - offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceTransformFeedbackPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceTransformFeedbackPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>(&rhs);
return *this;
}
@@ -53644,141 +49614,110 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceTransformFeedbackPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
+ void* pNext = {};
+ uint32_t maxTransformFeedbackStreams = {};
+ uint32_t maxTransformFeedbackBuffers = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {};
+ uint32_t maxTransformFeedbackStreamDataSize = {};
+ uint32_t maxTransformFeedbackBufferDataSize = {};
+ uint32_t maxTransformFeedbackBufferDataStride = {};
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {};
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {};
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
+ VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
};
static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
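
Properties structs are outputs, filled by chaining them into a VkPhysicalDeviceProperties2 query (Vulkan 1.1 or VK_KHR_get_physical_device_properties2). A sketch, with `physicalDevice` again an assumed VkPhysicalDevice handle:

    vk::PhysicalDeviceTransformFeedbackPropertiesEXT xfbProps;
    vk::PhysicalDeviceProperties2 props2;
    props2.pNext = &xfbProps;
    vkGetPhysicalDeviceProperties2( physicalDevice,
      reinterpret_cast<VkPhysicalDeviceProperties2*>( &props2 ) );
    uint32_t maxBuffers = xfbProps.maxTransformFeedbackBuffers;  // now populated
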
- namespace layout
+ struct PhysicalDeviceUniformBufferStandardLayoutFeatures
{
- struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( vk::Bool32 uniformBufferStandardLayout_ = 0 ) VULKAN_HPP_NOEXCEPT
- : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
- {}
-
- PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>(this) = rhs;
- }
-
- PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 uniformBufferStandardLayout;
- };
- static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR : public layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR
- {
- VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( vk::Bool32 uniformBufferStandardLayout_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( uniformBufferStandardLayout_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT
+ : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
{}
- PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) - offsetof( PhysicalDeviceUniformBufferStandardLayoutFeatures, pNext ) );
+ return *this;
+ }
- PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ PhysicalDeviceUniformBufferStandardLayoutFeatures& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>(&rhs);
return *this;
}
- PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setUniformBufferStandardLayout( vk::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
{
uniformBufferStandardLayout = uniformBufferStandardLayout_;
return *this;
}
- operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
- operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
- bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
}
- bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
};
- static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct PhysicalDeviceVariablePointersFeatures
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( vk::Bool32 variablePointersStorageBuffer_ = 0,
- vk::Bool32 variablePointers_ = 0 ) VULKAN_HPP_NOEXCEPT
- : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
- , variablePointers( variablePointers_ )
- {}
-
- PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>(this) = rhs;
- }
-
- PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
- void* pNext = nullptr;
- vk::Bool32 variablePointersStorageBuffer;
- vk::Bool32 variablePointers;
- };
- static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
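
The KHR suffix disappears here because VK_KHR_uniform_buffer_standard_layout was promoted to core in Vulkan 1.2; vulkan.hpp conventionally keeps the old KHR spelling as a type alias elsewhere in the header. A hedged query sketch using vulkan.hpp's StructureChain (`physicalDevice` is an assumed vk::PhysicalDevice):

    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
      vk::PhysicalDeviceUniformBufferStandardLayoutFeatures>();
    vk::Bool32 stdLayout =
      chain.get<vk::PhysicalDeviceUniformBufferStandardLayoutFeatures>().uniformBufferStandardLayout;
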
- struct PhysicalDeviceVariablePointersFeatures : public layout::PhysicalDeviceVariablePointersFeatures
+ struct PhysicalDeviceVariablePointersFeatures
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( vk::Bool32 variablePointersStorageBuffer_ = 0,
- vk::Bool32 variablePointers_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceVariablePointersFeatures( variablePointersStorageBuffer_, variablePointers_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT
+ : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
+ , variablePointers( variablePointers_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) - offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceVariablePointersFeatures( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceVariablePointersFeatures::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>(&rhs);
return *this;
}
@@ -53788,13 +49727,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( vk::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
{
variablePointersStorageBuffer = variablePointersStorageBuffer_;
return *this;
}
- PhysicalDeviceVariablePointersFeatures & setVariablePointers( vk::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
{
variablePointers = variablePointers_;
return *this;
@@ -53823,57 +49762,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceVariablePointersFeatures::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
};
static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( vk::Bool32 vertexAttributeInstanceRateDivisor_ = 0,
- vk::Bool32 vertexAttributeInstanceRateZeroDivisor_ = 0 ) VULKAN_HPP_NOEXCEPT
- : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
- , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
- {}
-
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 vertexAttributeInstanceRateDivisor;
- vk::Bool32 vertexAttributeInstanceRateZeroDivisor;
- };
- static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT : public layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT
+ struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( vk::Bool32 vertexAttributeInstanceRateDivisor_ = 0,
- vk::Bool32 vertexAttributeInstanceRateZeroDivisor_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT( vertexAttributeInstanceRateDivisor_, vertexAttributeInstanceRateZeroDivisor_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
+ : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
+ , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>(&rhs);
return *this;
}
@@ -53883,13 +49802,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( vk::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
return *this;
}
- PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( vk::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
return *this;
@@ -53918,52 +49837,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceVertexAttributeDivisorFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
};
static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
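
Once vertexAttributeInstanceRateDivisor is enabled, per-binding divisors attach to the pipeline's vertex input state through the matching EXT chain struct. A sketch, where `vertexInputState` is an assumed vk::PipelineVertexInputStateCreateInfo:

    vk::VertexInputBindingDivisorDescriptionEXT divisor( /*binding*/ 1, /*divisor*/ 4 );
    vk::PipelineVertexInputDivisorStateCreateInfoEXT divisorState( 1, &divisor );
    vertexInputState.setPNext( &divisorState );
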
- namespace layout
+ struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
{
- struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
- {
- protected:
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>(this) = rhs;
- }
-
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
- void* pNext = nullptr;
- uint32_t maxVertexAttribDivisor;
- };
- static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT : public layout::PhysicalDeviceVertexAttributeDivisorPropertiesEXT
- {
- PhysicalDeviceVertexAttributeDivisorPropertiesEXT() VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceVertexAttributeDivisorPropertiesEXT()
+ PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
+ : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceVertexAttributeDivisorPropertiesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceVertexAttributeDivisorPropertiesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>(&rhs);
return *this;
}
@@ -53989,159 +49891,1606 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceVertexAttributeDivisorPropertiesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+ void* pNext = {};
+ uint32_t maxVertexAttribDivisor = {};
};
static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PhysicalDeviceVulkan11Features
{
- struct PhysicalDeviceVulkanMemoryModelFeaturesKHR
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
+ : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+ , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
+ , storagePushConstant16( storagePushConstant16_ )
+ , storageInputOutput16( storageInputOutput16_ )
+ , multiview( multiview_ )
+ , multiviewGeometryShader( multiviewGeometryShader_ )
+ , multiviewTessellationShader( multiviewTessellationShader_ )
+ , variablePointersStorageBuffer( variablePointersStorageBuffer_ )
+ , variablePointers( variablePointers_ )
+ , protectedMemory( protectedMemory_ )
+ , samplerYcbcrConversion( samplerYcbcrConversion_ )
+ , shaderDrawParameters( shaderDrawParameters_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeaturesKHR( vk::Bool32 vulkanMemoryModel_ = 0,
- vk::Bool32 vulkanMemoryModelDeviceScope_ = 0,
- vk::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = 0 ) VULKAN_HPP_NOEXCEPT
- : vulkanMemoryModel( vulkanMemoryModel_ )
- , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
- , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) - offsetof( PhysicalDeviceVulkan11Features, pNext ) );
+ return *this;
+ }
- PhysicalDeviceVulkanMemoryModelFeaturesKHR( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>(this) = rhs;
- }
+ PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>(this) = rhs;
- return *this;
- }
+ PhysicalDeviceVulkan11Features& operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>(&rhs);
+ return *this;
+ }
- public:
- vk::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR;
- void* pNext = nullptr;
- vk::Bool32 vulkanMemoryModel;
- vk::Bool32 vulkanMemoryModelDeviceScope;
- vk::Bool32 vulkanMemoryModelAvailabilityVisibilityChains;
- };
- static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "layout struct and wrapper have different size!" );
- }
+ PhysicalDeviceVulkan11Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ {
+ storageBuffer16BitAccess = storageBuffer16BitAccess_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ {
+ uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+ return *this;
+ }
- struct PhysicalDeviceVulkanMemoryModelFeaturesKHR : public layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR
+ PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+ {
+ storagePushConstant16 = storagePushConstant16_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+ {
+ storageInputOutput16 = storageInputOutput16_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+ {
+ multiview = multiview_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ multiviewGeometryShader = multiviewGeometryShader_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+ {
+ multiviewTessellationShader = multiviewTessellationShader_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ variablePointersStorageBuffer = variablePointersStorageBuffer_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ variablePointers = variablePointers_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+ {
+ protectedMemory = protectedMemory_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
+ {
+ samplerYcbcrConversion = samplerYcbcrConversion_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderDrawParameters = shaderDrawParameters_;
+ return *this;
+ }
+
+ operator VkPhysicalDeviceVulkan11Features const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features*>( this );
+ }
+
+ operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceVulkan11Features*>( this );
+ }
+
+ bool operator==( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+ && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+ && ( storagePushConstant16 == rhs.storagePushConstant16 )
+ && ( storageInputOutput16 == rhs.storageInputOutput16 )
+ && ( multiview == rhs.multiview )
+ && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+ && ( multiviewTessellationShader == rhs.multiviewTessellationShader )
+ && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
+ && ( variablePointers == rhs.variablePointers )
+ && ( protectedMemory == rhs.protectedMemory )
+ && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion )
+ && ( shaderDrawParameters == rhs.shaderDrawParameters );
+ }
+
+ bool operator!=( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
+ VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
+ VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+ VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
+ };
+ static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
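
PhysicalDeviceVulkan11Features is new in this update: a Vulkan 1.2 aggregate collecting every feature bit promoted to core in 1.1, so one chained struct stands in for several per-extension ones. The setters compose as above:

    vk::PhysicalDeviceVulkan11Features features11;
    features11.setMultiview( VK_TRUE )
              .setShaderDrawParameters( VK_TRUE );
    // chain &features11 into vk::DeviceCreateInfo::pNext as in the earlier sketches
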
+
+ struct PhysicalDeviceVulkan11Properties
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeaturesKHR( vk::Bool32 vulkanMemoryModel_ = 0,
- vk::Bool32 vulkanMemoryModelDeviceScope_ = 0,
- vk::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR( vulkanMemoryModel_, vulkanMemoryModelDeviceScope_, vulkanMemoryModelAvailabilityVisibilityChains_ )
- {}
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {},
+ std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {},
+ std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {},
+ uint32_t deviceNodeMask_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {},
+ uint32_t subgroupSize_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {},
+ VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {},
+ VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
+ uint32_t maxMultiviewViewCount_ = {},
+ uint32_t maxMultiviewInstanceIndex_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {},
+ uint32_t maxPerSetDescriptors_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : deviceUUID{}
+ , driverUUID{}
+ , deviceLUID{}
+ , deviceNodeMask( deviceNodeMask_ )
+ , deviceLUIDValid( deviceLUIDValid_ )
+ , subgroupSize( subgroupSize_ )
+ , subgroupSupportedStages( subgroupSupportedStages_ )
+ , subgroupSupportedOperations( subgroupSupportedOperations_ )
+ , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ )
+ , pointClippingBehavior( pointClippingBehavior_ )
+ , maxMultiviewViewCount( maxMultiviewViewCount_ )
+ , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
+ , protectedNoFault( protectedNoFault_ )
+ , maxPerSetDescriptors( maxPerSetDescriptors_ )
+ , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( deviceUUID, deviceUUID_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE,VK_UUID_SIZE>::copy( driverUUID, driverUUID_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_LUID_SIZE,VK_LUID_SIZE>::copy( deviceLUID, deviceLUID_ );
+ }
- PhysicalDeviceVulkanMemoryModelFeaturesKHR( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) - offsetof( PhysicalDeviceVulkan11Properties, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- PhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan11Properties& operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>(&rhs);
return *this;
}
- PhysicalDeviceVulkanMemoryModelFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan11Properties & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModel( vk::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan11Properties & setDeviceUUID( std::array<uint8_t,VK_UUID_SIZE> deviceUUID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( deviceUUID, deviceUUID_.data(), VK_UUID_SIZE * sizeof( uint8_t ) );
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setDriverUUID( std::array<uint8_t,VK_UUID_SIZE> driverUUID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( driverUUID, driverUUID_.data(), VK_UUID_SIZE * sizeof( uint8_t ) );
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setDeviceLUID( std::array<uint8_t,VK_LUID_SIZE> deviceLUID_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( deviceLUID, deviceLUID_.data(), VK_LUID_SIZE * sizeof( uint8_t ) );
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setDeviceNodeMask( uint32_t deviceNodeMask_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceNodeMask = deviceNodeMask_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setDeviceLUIDValid( VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ ) VULKAN_HPP_NOEXCEPT
+ {
+ deviceLUIDValid = deviceLUIDValid_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setSubgroupSize( uint32_t subgroupSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subgroupSize = subgroupSize_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setSubgroupSupportedStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subgroupSupportedStages = subgroupSupportedStages_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setSubgroupSupportedOperations( VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subgroupSupportedOperations = subgroupSupportedOperations_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setSubgroupQuadOperationsInAllStages( VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subgroupQuadOperationsInAllStages = subgroupQuadOperationsInAllStages_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setPointClippingBehavior( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pointClippingBehavior = pointClippingBehavior_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setMaxMultiviewViewCount( uint32_t maxMultiviewViewCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxMultiviewViewCount = maxMultiviewViewCount_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setMaxMultiviewInstanceIndex( uint32_t maxMultiviewInstanceIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxMultiviewInstanceIndex = maxMultiviewInstanceIndex_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setProtectedNoFault( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ ) VULKAN_HPP_NOEXCEPT
+ {
+ protectedNoFault = protectedNoFault_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setMaxPerSetDescriptors( uint32_t maxPerSetDescriptors_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxPerSetDescriptors = maxPerSetDescriptors_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan11Properties & setMaxMemoryAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxMemoryAllocationSize = maxMemoryAllocationSize_;
+ return *this;
+ }
+
+ operator VkPhysicalDeviceVulkan11Properties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties*>( this );
+ }
+
+ operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this );
+ }
+
+ bool operator==( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 )
+ && ( deviceNodeMask == rhs.deviceNodeMask )
+ && ( deviceLUIDValid == rhs.deviceLUIDValid )
+ && ( subgroupSize == rhs.subgroupSize )
+ && ( subgroupSupportedStages == rhs.subgroupSupportedStages )
+ && ( subgroupSupportedOperations == rhs.subgroupSupportedOperations )
+ && ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages )
+ && ( pointClippingBehavior == rhs.pointClippingBehavior )
+ && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+ && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex )
+ && ( protectedNoFault == rhs.protectedNoFault )
+ && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
+ && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+ }
+
+ bool operator!=( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
+ void* pNext = {};
+ uint8_t deviceUUID[VK_UUID_SIZE] = {};
+ uint8_t driverUUID[VK_UUID_SIZE] = {};
+ uint8_t deviceLUID[VK_LUID_SIZE] = {};
+ uint32_t deviceNodeMask = {};
+ VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+ uint32_t subgroupSize = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
+ VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
+ VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+ uint32_t maxMultiviewViewCount = {};
+ uint32_t maxMultiviewInstanceIndex = {};
+ VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+ uint32_t maxPerSetDescriptors = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+ };
+ static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
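
The VULKAN_HPP_CONSTEXPR_14 marker and the ConstExpression1DArrayCopy helper exist because memcpy is not usable in a constexpr constructor; the helper copies a std::array into the C-array members element by element, which C++14 constexpr permits. An illustrative sketch of the idea, with assumed semantics (the real helper's implementation may differ):

    // Element-wise copy a C++14 constexpr constructor may perform, unlike memcpy.
    template <typename T, size_t N>
    VULKAN_HPP_CONSTEXPR_14 void constExprArrayCopySketch( T ( &dst )[N],
                                                           std::array<T, N> const & src ) VULKAN_HPP_NOEXCEPT
    {
      for ( size_t i = 0; i < N; ++i )
      {
        dst[i] = src[i];
      }
    }
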
+
+ struct PhysicalDeviceVulkan12Features
+ {
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {} ) VULKAN_HPP_NOEXCEPT
+ : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ )
+ , drawIndirectCount( drawIndirectCount_ )
+ , storageBuffer8BitAccess( storageBuffer8BitAccess_ )
+ , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
+ , storagePushConstant8( storagePushConstant8_ )
+ , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
+ , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
+ , shaderFloat16( shaderFloat16_ )
+ , shaderInt8( shaderInt8_ )
+ , descriptorIndexing( descriptorIndexing_ )
+ , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
+ , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
+ , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
+ , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
+ , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
+ , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
+ , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
+ , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
+ , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
+ , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
+ , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
+ , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
+ , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
+ , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
+ , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
+ , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
+ , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
+ , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
+ , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
+ , runtimeDescriptorArray( runtimeDescriptorArray_ )
+ , samplerFilterMinmax( samplerFilterMinmax_ )
+ , scalarBlockLayout( scalarBlockLayout_ )
+ , imagelessFramebuffer( imagelessFramebuffer_ )
+ , uniformBufferStandardLayout( uniformBufferStandardLayout_ )
+ , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
+ , separateDepthStencilLayouts( separateDepthStencilLayouts_ )
+ , hostQueryReset( hostQueryReset_ )
+ , timelineSemaphore( timelineSemaphore_ )
+ , bufferDeviceAddress( bufferDeviceAddress_ )
+ , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+ , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+ , vulkanMemoryModel( vulkanMemoryModel_ )
+ , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
+ , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+ , shaderOutputViewportIndex( shaderOutputViewportIndex_ )
+ , shaderOutputLayer( shaderOutputLayer_ )
+ , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) - offsetof( PhysicalDeviceVulkan12Features, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ PhysicalDeviceVulkan12Features& operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>(&rhs);
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
+ {
+ samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ drawIndirectCount = drawIndirectCount_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ {
+ storageBuffer8BitAccess = storageBuffer8BitAccess_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+ {
+ uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+ {
+ storagePushConstant8 = storagePushConstant8_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderFloat16 = shaderFloat16_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderInt8 = shaderInt8_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorIndexing = descriptorIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+ {
+ runtimeDescriptorArray = runtimeDescriptorArray_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
+ {
+ samplerFilterMinmax = samplerFilterMinmax_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ scalarBlockLayout = scalarBlockLayout_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ imagelessFramebuffer = imagelessFramebuffer_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
+ {
+ uniformBufferStandardLayout = uniformBufferStandardLayout_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ separateDepthStencilLayouts = separateDepthStencilLayouts_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+ {
+ hostQueryReset = hostQueryReset_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
+ {
+ timelineSemaphore = timelineSemaphore_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferDeviceAddress = bufferDeviceAddress_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+ {
+ bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModel = vulkanMemoryModel_;
return *this;
}

-    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelDeviceScope( vk::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
return *this;
}

-    PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelAvailabilityVisibilityChains( vk::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
return *this;
}

-    operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderOutputViewportIndex = shaderOutputViewportIndex_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderOutputLayer = shaderOutputLayer_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
+ {
+ subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
+ return *this;
+ }
+
+ operator VkPhysicalDeviceVulkan12Features const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>( this );
+ return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this );
}
- operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeaturesKHR*>( this );
+ return *reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this );
}
- bool operator==( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
+ && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge )
+ && ( drawIndirectCount == rhs.drawIndirectCount )
+ && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
+ && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
+ && ( storagePushConstant8 == rhs.storagePushConstant8 )
+ && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
+ && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics )
+ && ( shaderFloat16 == rhs.shaderFloat16 )
+ && ( shaderInt8 == rhs.shaderInt8 )
+ && ( descriptorIndexing == rhs.descriptorIndexing )
+ && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
+ && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+ && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+ && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
+ && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
+ && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
+ && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
+ && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
+ && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+ && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+ && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
+ && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
+ && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
+ && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
+ && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+ && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+ && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
+ && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
+ && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
+ && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray )
+ && ( samplerFilterMinmax == rhs.samplerFilterMinmax )
+ && ( scalarBlockLayout == rhs.scalarBlockLayout )
+ && ( imagelessFramebuffer == rhs.imagelessFramebuffer )
+ && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout )
+ && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes )
+ && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts )
+ && ( hostQueryReset == rhs.hostQueryReset )
+ && ( timelineSemaphore == rhs.timelineSemaphore )
+ && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+ && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+ && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice )
&& ( vulkanMemoryModel == rhs.vulkanMemoryModel )
&& ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
- && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
+ && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains )
+ && ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex )
+ && ( shaderOutputLayer == rhs.shaderOutputLayer )
+ && ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
}
- bool operator!=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceVulkanMemoryModelFeaturesKHR::sType;
- };
- static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
+ VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+ VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+ VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+ VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+ VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
+ VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
+ VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+ VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
+ VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+ VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+ VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
+ VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
+ };
+ static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
+
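A minimal usage sketch (editorial illustration, not part of the generated header or of this patch): the new PhysicalDeviceVulkan12Features wrapper above is filled through its chainable setters and hung off DeviceCreateInfo::pNext, exactly as with the C struct. The helper name makeDevice, the queue setup, and the particular features enabled are assumptions for the example; a real application should only request features the device actually reports.

    #include <vulkan/vulkan.hpp>

    // Sketch: create a device with a few Vulkan 1.2 features enabled.
    vk::Device makeDevice( vk::PhysicalDevice gpu, uint32_t queueFamily )
    {
      float priority = 1.0f;
      vk::DeviceQueueCreateInfo queueInfo( {}, queueFamily, 1, &priority );

      vk::PhysicalDeviceVulkan12Features features12;   // sType is preset by the wrapper
      features12.setTimelineSemaphore( VK_TRUE )       // setters return *this, so they chain
                .setDescriptorIndexing( VK_TRUE )
                .setBufferDeviceAddress( VK_TRUE );

      vk::DeviceCreateInfo createInfo( {}, 1, &queueInfo );
      createInfo.setPNext( &features12 );              // chained through pNext, as in the C API
      return gpu.createDevice( createInfo );
    }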
+ struct PhysicalDeviceVulkan12Properties
+ {
+ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
+ std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {},
+ std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {},
+ VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {},
+ uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+ uint32_t maxPerStageUpdateAfterBindResources_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {},
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {},
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {},
+ uint64_t maxTimelineSemaphoreValueDifference_ = {},
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT
+ : driverID( driverID_ )
+ , driverName{}
+ , driverInfo{}
+ , conformanceVersion( conformanceVersion_ )
+ , denormBehaviorIndependence( denormBehaviorIndependence_ )
+ , roundingModeIndependence( roundingModeIndependence_ )
+ , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
+ , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
+ , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
+ , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
+ , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
+ , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
+ , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
+ , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
+ , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
+ , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
+ , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
+ , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
+ , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
+ , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
+ , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
+ , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
+ , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
+ , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
+ , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
+ , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
+ , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
+ , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
+ , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
+ , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
+ , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
+ , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
+ , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
+ , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
+ , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
+ , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
+ , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
+ , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
+ , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
+ , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
+ , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
+ , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
+ , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
+ , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
+ , supportedDepthResolveModes( supportedDepthResolveModes_ )
+ , supportedStencilResolveModes( supportedStencilResolveModes_ )
+ , independentResolveNone( independentResolveNone_ )
+ , independentResolve( independentResolve_ )
+ , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
+ , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
+ , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
+ , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE,VK_MAX_DRIVER_NAME_SIZE>::copy( driverName, driverName_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE,VK_MAX_DRIVER_INFO_SIZE>::copy( driverInfo, driverInfo_ );
+ }
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) - offsetof( PhysicalDeviceVulkan12Properties, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ PhysicalDeviceVulkan12Properties& operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>(&rhs);
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }

-  namespace layout
- {
- struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
+ PhysicalDeviceVulkan12Properties & setDriverID( VULKAN_HPP_NAMESPACE::DriverId driverID_ ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( vk::Bool32 ycbcrImageArrays_ = 0 ) VULKAN_HPP_NOEXCEPT
- : ycbcrImageArrays( ycbcrImageArrays_ )
- {}
+ driverID = driverID_;
+ return *this;
+ }

-      PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>(this) = rhs;
- }

+    PhysicalDeviceVulkan12Properties & setDriverName( std::array<char,VK_MAX_DRIVER_NAME_SIZE> driverName_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( driverName, driverName_.data(), VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) );
+ return *this;
+ }

-      PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>(this) = rhs;
- return *this;
- }

+    PhysicalDeviceVulkan12Properties & setDriverInfo( std::array<char,VK_MAX_DRIVER_INFO_SIZE> driverInfo_ ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( driverInfo, driverInfo_.data(), VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) );
+ return *this;
+ }

-    public:
- vk::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
- void* pNext = nullptr;
- vk::Bool32 ycbcrImageArrays;
- };
- static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "layout struct and wrapper have different size!" );
- }

+    PhysicalDeviceVulkan12Properties & setConformanceVersion( VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ ) VULKAN_HPP_NOEXCEPT
+ {
+ conformanceVersion = conformanceVersion_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setDenormBehaviorIndependence( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ ) VULKAN_HPP_NOEXCEPT
+ {
+ denormBehaviorIndependence = denormBehaviorIndependence_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setRoundingModeIndependence( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ ) VULKAN_HPP_NOEXCEPT
+ {
+ roundingModeIndependence = roundingModeIndependence_;
+ return *this;
+ }

-  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT : public layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT
+ PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSignedZeroInfNanPreserveFloat16 = shaderSignedZeroInfNanPreserveFloat16_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSignedZeroInfNanPreserveFloat32 = shaderSignedZeroInfNanPreserveFloat32_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderSignedZeroInfNanPreserveFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSignedZeroInfNanPreserveFloat64 = shaderSignedZeroInfNanPreserveFloat64_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderDenormPreserveFloat16 = shaderDenormPreserveFloat16_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderDenormPreserveFloat32 = shaderDenormPreserveFloat32_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderDenormPreserveFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderDenormPreserveFloat64 = shaderDenormPreserveFloat64_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderDenormFlushToZeroFloat16 = shaderDenormFlushToZeroFloat16_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderDenormFlushToZeroFloat32 = shaderDenormFlushToZeroFloat32_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderDenormFlushToZeroFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderDenormFlushToZeroFloat64 = shaderDenormFlushToZeroFloat64_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderRoundingModeRTEFloat16 = shaderRoundingModeRTEFloat16_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderRoundingModeRTEFloat32 = shaderRoundingModeRTEFloat32_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTEFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderRoundingModeRTEFloat64 = shaderRoundingModeRTEFloat64_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderRoundingModeRTZFloat16 = shaderRoundingModeRTZFloat16_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat32( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderRoundingModeRTZFloat32 = shaderRoundingModeRTZFloat32_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderRoundingModeRTZFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderRoundingModeRTZFloat64 = shaderRoundingModeRTZFloat64_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxUpdateAfterBindDescriptorsInAllPools( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxUpdateAfterBindDescriptorsInAllPools = maxUpdateAfterBindDescriptorsInAllPools_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderUniformBufferArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderUniformBufferArrayNonUniformIndexingNative = shaderUniformBufferArrayNonUniformIndexingNative_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderSampledImageArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderSampledImageArrayNonUniformIndexingNative = shaderSampledImageArrayNonUniformIndexingNative_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderStorageBufferArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderStorageBufferArrayNonUniformIndexingNative = shaderStorageBufferArrayNonUniformIndexingNative_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderStorageImageArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderStorageImageArrayNonUniformIndexingNative = shaderStorageImageArrayNonUniformIndexingNative_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setShaderInputAttachmentArrayNonUniformIndexingNative( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ ) VULKAN_HPP_NOEXCEPT
+ {
+ shaderInputAttachmentArrayNonUniformIndexingNative = shaderInputAttachmentArrayNonUniformIndexingNative_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setRobustBufferAccessUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+ {
+ robustBufferAccessUpdateAfterBind = robustBufferAccessUpdateAfterBind_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setQuadDivergentImplicitLod( VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ ) VULKAN_HPP_NOEXCEPT
+ {
+ quadDivergentImplicitLod = quadDivergentImplicitLod_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindSamplers( uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxPerStageDescriptorUpdateAfterBindSamplers = maxPerStageDescriptorUpdateAfterBindSamplers_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindUniformBuffers( uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxPerStageDescriptorUpdateAfterBindUniformBuffers = maxPerStageDescriptorUpdateAfterBindUniformBuffers_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindStorageBuffers( uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxPerStageDescriptorUpdateAfterBindStorageBuffers = maxPerStageDescriptorUpdateAfterBindStorageBuffers_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindSampledImages( uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxPerStageDescriptorUpdateAfterBindSampledImages = maxPerStageDescriptorUpdateAfterBindSampledImages_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindStorageImages( uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxPerStageDescriptorUpdateAfterBindStorageImages = maxPerStageDescriptorUpdateAfterBindStorageImages_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxPerStageDescriptorUpdateAfterBindInputAttachments( uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxPerStageDescriptorUpdateAfterBindInputAttachments = maxPerStageDescriptorUpdateAfterBindInputAttachments_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxPerStageUpdateAfterBindResources( uint32_t maxPerStageUpdateAfterBindResources_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxPerStageUpdateAfterBindResources = maxPerStageUpdateAfterBindResources_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindSamplers( uint32_t maxDescriptorSetUpdateAfterBindSamplers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDescriptorSetUpdateAfterBindSamplers = maxDescriptorSetUpdateAfterBindSamplers_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindUniformBuffers( uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDescriptorSetUpdateAfterBindUniformBuffers = maxDescriptorSetUpdateAfterBindUniformBuffers_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindUniformBuffersDynamic( uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageBuffers( uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDescriptorSetUpdateAfterBindStorageBuffers = maxDescriptorSetUpdateAfterBindStorageBuffers_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageBuffersDynamic( uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindSampledImages( uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDescriptorSetUpdateAfterBindSampledImages = maxDescriptorSetUpdateAfterBindSampledImages_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindStorageImages( uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDescriptorSetUpdateAfterBindStorageImages = maxDescriptorSetUpdateAfterBindStorageImages_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxDescriptorSetUpdateAfterBindInputAttachments( uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxDescriptorSetUpdateAfterBindInputAttachments = maxDescriptorSetUpdateAfterBindInputAttachments_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setSupportedDepthResolveModes( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ supportedDepthResolveModes = supportedDepthResolveModes_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setSupportedStencilResolveModes( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ ) VULKAN_HPP_NOEXCEPT
+ {
+ supportedStencilResolveModes = supportedStencilResolveModes_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setIndependentResolveNone( VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ ) VULKAN_HPP_NOEXCEPT
+ {
+ independentResolveNone = independentResolveNone_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setIndependentResolve( VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ ) VULKAN_HPP_NOEXCEPT
+ {
+ independentResolve = independentResolve_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setFilterMinmaxSingleComponentFormats( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ ) VULKAN_HPP_NOEXCEPT
+ {
+ filterMinmaxSingleComponentFormats = filterMinmaxSingleComponentFormats_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setFilterMinmaxImageComponentMapping( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ ) VULKAN_HPP_NOEXCEPT
+ {
+ filterMinmaxImageComponentMapping = filterMinmaxImageComponentMapping_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setMaxTimelineSemaphoreValueDifference( uint64_t maxTimelineSemaphoreValueDifference_ ) VULKAN_HPP_NOEXCEPT
+ {
+ maxTimelineSemaphoreValueDifference = maxTimelineSemaphoreValueDifference_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkan12Properties & setFramebufferIntegerColorSampleCounts( VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ ) VULKAN_HPP_NOEXCEPT
+ {
+ framebufferIntegerColorSampleCounts = framebufferIntegerColorSampleCounts_;
+ return *this;
+ }
+
+ operator VkPhysicalDeviceVulkan12Properties const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties*>( this );
+ }
+
+ operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties*>( this );
+ }
+
+ bool operator==( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( driverID == rhs.driverID )
+ && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE * sizeof( char ) ) == 0 )
+ && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE * sizeof( char ) ) == 0 )
+ && ( conformanceVersion == rhs.conformanceVersion )
+ && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
+ && ( roundingModeIndependence == rhs.roundingModeIndependence )
+ && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
+ && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
+ && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
+ && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
+ && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
+ && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
+ && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
+ && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
+ && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
+ && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
+ && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
+ && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
+ && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
+ && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
+ && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 )
+ && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
+ && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+ && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
+ && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+ && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
+ && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+ && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
+ && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
+ && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+ && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+ && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+ && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+ && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+ && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+ && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
+ && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
+ && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+ && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+ && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+ && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+ && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+ && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+ && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments )
+ && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
+ && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
+ && ( independentResolveNone == rhs.independentResolveNone )
+ && ( independentResolve == rhs.independentResolve )
+ && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
+ && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping )
+ && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference )
+ && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
+ }
+
+ bool operator!=( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
+ char driverName[VK_MAX_DRIVER_NAME_SIZE] = {};
+ char driverInfo[VK_MAX_DRIVER_INFO_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+ VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+ uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+ VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+ VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+ uint32_t maxPerStageUpdateAfterBindResources = {};
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
+ VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+ VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
+ VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
+ uint64_t maxTimelineSemaphoreValueDifference = {};
+ VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
+ };
+ static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
+
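Another editorial sketch, under the same assumptions: unlike the features struct, PhysicalDeviceVulkan12Properties is read back rather than filled in, by chaining it behind PhysicalDeviceProperties2 before calling getProperties2. The helper name printDriverInfo is hypothetical.

    #include <vulkan/vulkan.hpp>
    #include <cstdio>

    // Sketch: query the Vulkan 1.2 property block and print the driver strings.
    void printDriverInfo( vk::PhysicalDevice gpu )
    {
      vk::PhysicalDeviceVulkan12Properties props12;    // sType is preset by the wrapper
      vk::PhysicalDeviceProperties2 props2;
      props2.pNext = &props12;                         // chain the 1.2 block

      gpu.getProperties2( &props2 );                   // the implementation fills both structs

      // driverName / driverInfo are the fixed-size char arrays declared above
      std::printf( "driver: %s (%s)\n", props12.driverName, props12.driverInfo );
    }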
+ struct PhysicalDeviceVulkanMemoryModelFeatures
+ {
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT
+ : vulkanMemoryModel( vulkanMemoryModel_ )
+ , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
+ , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) - offsetof( PhysicalDeviceVulkanMemoryModelFeatures, pNext ) );
+ return *this;
+ }
+
+ PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
+
+ PhysicalDeviceVulkanMemoryModelFeatures& operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>(&rhs);
+ return *this;
+ }
+
+ PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vulkanMemoryModel = vulkanMemoryModel_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
+ return *this;
+ }
+
+ PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
+ {
+ vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
+ return *this;
+ }
+
+ operator VkPhysicalDeviceVulkanMemoryModelFeatures const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
+ }
+
+ operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
+ }
+
+ bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
+ && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
+ && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
+ }
+
+ bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
+ VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
+ };
+ static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" );
+
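// A brief usage sketch (assumptions: the conventional `vk` alias for
// VULKAN_HPP_NAMESPACE, and a vk::DeviceCreateInfo named createInfo built
// elsewhere). The fluent setters return *this, so they chain:
//
//   vk::PhysicalDeviceVulkanMemoryModelFeatures memoryModel;
//   memoryModel.setVulkanMemoryModel( VK_TRUE )
//              .setVulkanMemoryModelDeviceScope( VK_TRUE );
//   createInfo.setPNext( &memoryModel );   // picked up by vkCreateDevice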
+ struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( vk::Bool32 ycbcrImageArrays_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT( ycbcrImageArrays_ )
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT
+ : ycbcrImageArrays( ycbcrImageArrays_ )
{}
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) );
+ return *this;
+ }
+
PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>(&rhs);
return *this;
}
@@ -54151,7 +51500,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( vk::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
+ PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrImageArrays = ycbcrImageArrays_;
return *this;
@@ -54179,61 +51528,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PhysicalDeviceYcbcrImageArraysFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
};
static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineCacheCreateInfo
{
- struct PipelineCacheCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( vk::PipelineCacheCreateFlags flags_ = vk::PipelineCacheCreateFlags(),
- size_t initialDataSize_ = 0,
- const void* pInitialData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , initialDataSize( initialDataSize_ )
- , pInitialData( pInitialData_ )
- {}
-
- PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCacheCreateInfo*>(this) = rhs;
- }
-
- PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCacheCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineCacheCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineCacheCreateFlags flags;
- size_t initialDataSize;
- const void* pInitialData;
- };
- static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineCacheCreateInfo : public layout::PipelineCacheCreateInfo
- {
- VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( vk::PipelineCacheCreateFlags flags_ = vk::PipelineCacheCreateFlags(),
- size_t initialDataSize_ = 0,
- const void* pInitialData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCacheCreateInfo( flags_, initialDataSize_, pInitialData_ )
+ VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {},
+ size_t initialDataSize_ = {},
+ const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , initialDataSize( initialDataSize_ )
+ , pInitialData( pInitialData_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) - offsetof( PipelineCacheCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCacheCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineCacheCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>(&rhs);
return *this;
}
@@ -54243,7 +51569,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineCacheCreateInfo & setFlags( vk::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -54285,61 +51611,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineCacheCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
+ size_t initialDataSize = {};
+ const void* pInitialData = {};
};
static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
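// Usage sketch (assumptions: an existing vk::Device named device, and
// loadCacheBlob() as a hypothetical helper returning a std::vector<uint8_t>
// holding a previously serialized cache):
//
//   std::vector<uint8_t> blob = loadCacheBlob();
//   vk::PipelineCacheCreateInfo cacheInfo;
//   cacheInfo.setInitialDataSize( blob.size() )
//            .setPInitialData( blob.data() );
//   vk::UniquePipelineCache cache = device.createPipelineCacheUnique( cacheInfo );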
- namespace layout
+ struct PipelineColorBlendAdvancedStateCreateInfoEXT
{
- struct PipelineColorBlendAdvancedStateCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( vk::Bool32 srcPremultiplied_ = 0,
- vk::Bool32 dstPremultiplied_ = 0,
- vk::BlendOverlapEXT blendOverlap_ = vk::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT
- : srcPremultiplied( srcPremultiplied_ )
- , dstPremultiplied( dstPremultiplied_ )
- , blendOverlap( blendOverlap_ )
- {}
-
- PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
- const void* pNext = nullptr;
- vk::Bool32 srcPremultiplied;
- vk::Bool32 dstPremultiplied;
- vk::BlendOverlapEXT blendOverlap;
- };
- static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineColorBlendAdvancedStateCreateInfoEXT : public layout::PipelineColorBlendAdvancedStateCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( vk::Bool32 srcPremultiplied_ = 0,
- vk::Bool32 dstPremultiplied_ = 0,
- vk::BlendOverlapEXT blendOverlap_ = vk::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineColorBlendAdvancedStateCreateInfoEXT( srcPremultiplied_, dstPremultiplied_, blendOverlap_ )
+ VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {},
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT
+ : srcPremultiplied( srcPremultiplied_ )
+ , dstPremultiplied( dstPremultiplied_ )
+ , blendOverlap( blendOverlap_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) - offsetof( PipelineColorBlendAdvancedStateCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineColorBlendAdvancedStateCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineColorBlendAdvancedStateCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -54349,19 +51654,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( vk::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
{
srcPremultiplied = srcPremultiplied_;
return *this;
}
- PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( vk::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
{
dstPremultiplied = dstPremultiplied_;
return *this;
}
- PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( vk::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
+ PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
{
blendOverlap = blendOverlap_;
return *this;
@@ -54391,53 +51696,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineColorBlendAdvancedStateCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
+ VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
+ VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
};
static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
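// Usage sketch (assumptions: the VK_EXT_blend_operation_advanced extension is
// enabled and blendState is an existing vk::PipelineColorBlendStateCreateInfo):
//
//   vk::PipelineColorBlendAdvancedStateCreateInfoEXT advanced;
//   advanced.setSrcPremultiplied( VK_TRUE )
//           .setDstPremultiplied( VK_TRUE )
//           .setBlendOverlap( vk::BlendOverlapEXT::eDisjoint );
//   blendState.setPNext( &advanced );   // extends the regular blend state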
- namespace layout
- {
- struct PipelineCompilerControlCreateInfoAMD
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( vk::PipelineCompilerControlFlagsAMD compilerControlFlags_ = vk::PipelineCompilerControlFlagsAMD() ) VULKAN_HPP_NOEXCEPT
- : compilerControlFlags( compilerControlFlags_ )
- {}
-
- PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>(this) = rhs;
- }
-
- PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
- const void* pNext = nullptr;
- vk::PipelineCompilerControlFlagsAMD compilerControlFlags;
- };
- static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineCompilerControlCreateInfoAMD : public layout::PipelineCompilerControlCreateInfoAMD
+ struct PipelineCompilerControlCreateInfoAMD
{
- VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( vk::PipelineCompilerControlFlagsAMD compilerControlFlags_ = vk::PipelineCompilerControlFlagsAMD() ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCompilerControlCreateInfoAMD( compilerControlFlags_ )
+ VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : compilerControlFlags( compilerControlFlags_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) - offsetof( PipelineCompilerControlCreateInfoAMD, pNext ) );
+ return *this;
+ }
+
PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCompilerControlCreateInfoAMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineCompilerControlCreateInfoAMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>(&rhs);
return *this;
}
@@ -54447,7 +51735,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( vk::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
{
compilerControlFlags = compilerControlFlags_;
return *this;
@@ -54475,69 +51763,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineCompilerControlCreateInfoAMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
};
static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineCoverageModulationStateCreateInfoNV
{
- struct PipelineCoverageModulationStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( vk::PipelineCoverageModulationStateCreateFlagsNV flags_ = vk::PipelineCoverageModulationStateCreateFlagsNV(),
- vk::CoverageModulationModeNV coverageModulationMode_ = vk::CoverageModulationModeNV::eNone,
- vk::Bool32 coverageModulationTableEnable_ = 0,
- uint32_t coverageModulationTableCount_ = 0,
- const float* pCoverageModulationTable_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , coverageModulationMode( coverageModulationMode_ )
- , coverageModulationTableEnable( coverageModulationTableEnable_ )
- , coverageModulationTableCount( coverageModulationTableCount_ )
- , pCoverageModulationTable( pCoverageModulationTable_ )
- {}
-
- PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
- const void* pNext = nullptr;
- vk::PipelineCoverageModulationStateCreateFlagsNV flags;
- vk::CoverageModulationModeNV coverageModulationMode;
- vk::Bool32 coverageModulationTableEnable;
- uint32_t coverageModulationTableCount;
- const float* pCoverageModulationTable;
- };
- static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineCoverageModulationStateCreateInfoNV : public layout::PipelineCoverageModulationStateCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( vk::PipelineCoverageModulationStateCreateFlagsNV flags_ = vk::PipelineCoverageModulationStateCreateFlagsNV(),
- vk::CoverageModulationModeNV coverageModulationMode_ = vk::CoverageModulationModeNV::eNone,
- vk::Bool32 coverageModulationTableEnable_ = 0,
- uint32_t coverageModulationTableCount_ = 0,
- const float* pCoverageModulationTable_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCoverageModulationStateCreateInfoNV( flags_, coverageModulationMode_, coverageModulationTableEnable_, coverageModulationTableCount_, pCoverageModulationTable_ )
+ VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {},
+ VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone,
+ VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {},
+ uint32_t coverageModulationTableCount_ = {},
+ const float* pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , coverageModulationMode( coverageModulationMode_ )
+ , coverageModulationTableEnable( coverageModulationTableEnable_ )
+ , coverageModulationTableCount( coverageModulationTableCount_ )
+ , pCoverageModulationTable( pCoverageModulationTable_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) - offsetof( PipelineCoverageModulationStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCoverageModulationStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineCoverageModulationStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -54547,19 +51808,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineCoverageModulationStateCreateInfoNV & setFlags( vk::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( vk::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationMode = coverageModulationMode_;
return *this;
}
- PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( vk::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
{
coverageModulationTableEnable = coverageModulationTableEnable_;
return *this;
@@ -54603,57 +51864,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineCoverageModulationStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
+ VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
+ VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
+ uint32_t coverageModulationTableCount = {};
+ const float* pCoverageModulationTable = {};
};
static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineCoverageReductionStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( vk::PipelineCoverageReductionStateCreateFlagsNV flags_ = vk::PipelineCoverageReductionStateCreateFlagsNV(),
- vk::CoverageReductionModeNV coverageReductionMode_ = vk::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , coverageReductionMode( coverageReductionMode_ )
- {}
-
- PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
- const void* pNext = nullptr;
- vk::PipelineCoverageReductionStateCreateFlagsNV flags;
- vk::CoverageReductionModeNV coverageReductionMode;
- };
- static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineCoverageReductionStateCreateInfoNV : public layout::PipelineCoverageReductionStateCreateInfoNV
+ struct PipelineCoverageReductionStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( vk::PipelineCoverageReductionStateCreateFlagsNV flags_ = vk::PipelineCoverageReductionStateCreateFlagsNV(),
- vk::CoverageReductionModeNV coverageReductionMode_ = vk::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCoverageReductionStateCreateInfoNV( flags_, coverageReductionMode_ )
+ VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
+ VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , coverageReductionMode( coverageReductionMode_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) - offsetof( PipelineCoverageReductionStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCoverageReductionStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineCoverageReductionStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -54663,13 +51907,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineCoverageReductionStateCreateInfoNV & setFlags( vk::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( vk::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
{
coverageReductionMode = coverageReductionMode_;
return *this;
@@ -54698,61 +51942,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineCoverageReductionStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
+ VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
};
static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCoverageReductionStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineCoverageToColorStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( vk::PipelineCoverageToColorStateCreateFlagsNV flags_ = vk::PipelineCoverageToColorStateCreateFlagsNV(),
- vk::Bool32 coverageToColorEnable_ = 0,
- uint32_t coverageToColorLocation_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , coverageToColorEnable( coverageToColorEnable_ )
- , coverageToColorLocation( coverageToColorLocation_ )
- {}
-
- PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
- const void* pNext = nullptr;
- vk::PipelineCoverageToColorStateCreateFlagsNV flags;
- vk::Bool32 coverageToColorEnable;
- uint32_t coverageToColorLocation;
- };
- static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineCoverageToColorStateCreateInfoNV : public layout::PipelineCoverageToColorStateCreateInfoNV
+ struct PipelineCoverageToColorStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( vk::PipelineCoverageToColorStateCreateFlagsNV flags_ = vk::PipelineCoverageToColorStateCreateFlagsNV(),
- vk::Bool32 coverageToColorEnable_ = 0,
- uint32_t coverageToColorLocation_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCoverageToColorStateCreateInfoNV( flags_, coverageToColorEnable_, coverageToColorLocation_ )
+ VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {},
+ uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , coverageToColorEnable( coverageToColorEnable_ )
+ , coverageToColorLocation( coverageToColorLocation_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) - offsetof( PipelineCoverageToColorStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCoverageToColorStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineCoverageToColorStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -54762,13 +51984,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineCoverageToColorStateCreateInfoNV & setFlags( vk::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( vk::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
{
coverageToColorEnable = coverageToColorEnable_;
return *this;
@@ -54804,25 +52026,32 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineCoverageToColorStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
+ uint32_t coverageToColorLocation = {};
};
static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
struct PipelineCreationFeedbackEXT
{
- PipelineCreationFeedbackEXT() VULKAN_HPP_NOEXCEPT
+ PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {},
+ uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , duration( duration_ )
{}
PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPipelineCreationFeedbackEXT*>(this) = rhs;
+ *this = rhs;
}
PipelineCreationFeedbackEXT& operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPipelineCreationFeedbackEXT*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>(&rhs);
return *this;
}
@@ -54848,61 +52077,36 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::PipelineCreationFeedbackFlagsEXT flags;
- uint64_t duration;
+ VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {};
+ uint64_t duration = {};
};
static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCreationFeedbackEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineCreationFeedbackCreateInfoEXT
{
- struct PipelineCreationFeedbackCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( vk::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = nullptr,
- uint32_t pipelineStageCreationFeedbackCount_ = 0,
- vk::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
- , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
- , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
- {}
-
- PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineCreationFeedbackCreateInfoEXT& operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
- const void* pNext = nullptr;
- vk::PipelineCreationFeedbackEXT* pPipelineCreationFeedback;
- uint32_t pipelineStageCreationFeedbackCount;
- vk::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks;
- };
- static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineCreationFeedbackCreateInfoEXT : public layout::PipelineCreationFeedbackCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( vk::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = nullptr,
- uint32_t pipelineStageCreationFeedbackCount_ = 0,
- vk::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCreationFeedbackCreateInfoEXT( pPipelineCreationFeedback_, pipelineStageCreationFeedbackCount_, pPipelineStageCreationFeedbacks_ )
+ VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {},
+ uint32_t pipelineStageCreationFeedbackCount_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
+ , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
+ , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT ) - offsetof( PipelineCreationFeedbackCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineCreationFeedbackCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineCreationFeedbackCreateInfoEXT& operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineCreationFeedbackCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -54912,7 +52116,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( vk::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineCreationFeedback = pPipelineCreationFeedback_;
return *this;
@@ -54924,7 +52128,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( vk::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+ PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
{
pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
return *this;
@@ -54954,65 +52158,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineCreationFeedbackCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback = {};
+ uint32_t pipelineStageCreationFeedbackCount = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks = {};
};
static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCreationFeedbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
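// Usage sketch (assumptions: VK_EXT_pipeline_creation_feedback is enabled and
// pipelineInfo is an existing vk::GraphicsPipelineCreateInfo with two stages):
//
//   vk::PipelineCreationFeedbackEXT whole;
//   std::array<vk::PipelineCreationFeedbackEXT, 2> perStage{};
//   vk::PipelineCreationFeedbackCreateInfoEXT feedback;
//   feedback.setPPipelineCreationFeedback( &whole )
//           .setPipelineStageCreationFeedbackCount( static_cast<uint32_t>( perStage.size() ) )
//           .setPPipelineStageCreationFeedbacks( perStage.data() );
//   pipelineInfo.setPNext( &feedback );
//   // after creation, whole.duration holds nanoseconds when whole.flags
//   // contains vk::PipelineCreationFeedbackFlagBitsEXT::eValid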
- namespace layout
- {
- struct PipelineDiscardRectangleStateCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( vk::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = vk::PipelineDiscardRectangleStateCreateFlagsEXT(),
- vk::DiscardRectangleModeEXT discardRectangleMode_ = vk::DiscardRectangleModeEXT::eInclusive,
- uint32_t discardRectangleCount_ = 0,
- const vk::Rect2D* pDiscardRectangles_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , discardRectangleMode( discardRectangleMode_ )
- , discardRectangleCount( discardRectangleCount_ )
- , pDiscardRectangles( pDiscardRectangles_ )
- {}
-
- PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
- const void* pNext = nullptr;
- vk::PipelineDiscardRectangleStateCreateFlagsEXT flags;
- vk::DiscardRectangleModeEXT discardRectangleMode;
- uint32_t discardRectangleCount;
- const vk::Rect2D* pDiscardRectangles;
- };
- static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineDiscardRectangleStateCreateInfoEXT : public layout::PipelineDiscardRectangleStateCreateInfoEXT
+ struct PipelineDiscardRectangleStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( vk::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = vk::PipelineDiscardRectangleStateCreateFlagsEXT(),
- vk::DiscardRectangleModeEXT discardRectangleMode_ = vk::DiscardRectangleModeEXT::eInclusive,
- uint32_t discardRectangleCount_ = 0,
- const vk::Rect2D* pDiscardRectangles_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineDiscardRectangleStateCreateInfoEXT( flags_, discardRectangleMode_, discardRectangleCount_, pDiscardRectangles_ )
+ VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
+ uint32_t discardRectangleCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , discardRectangleMode( discardRectangleMode_ )
+ , discardRectangleCount( discardRectangleCount_ )
+ , pDiscardRectangles( pDiscardRectangles_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) - offsetof( PipelineDiscardRectangleStateCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineDiscardRectangleStateCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineDiscardRectangleStateCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -55022,13 +52203,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineDiscardRectangleStateCreateInfoEXT & setFlags( vk::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( vk::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
{
discardRectangleMode = discardRectangleMode_;
return *this;
@@ -55040,7 +52221,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const vk::Rect2D* pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
+ PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
{
pDiscardRectangles = pDiscardRectangles_;
return *this;
@@ -55071,57 +52252,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineDiscardRectangleStateCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
+ uint32_t discardRectangleCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles = {};
};
static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineExecutableInfoKHR
{
- struct PipelineExecutableInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( vk::Pipeline pipeline_ = vk::Pipeline(),
- uint32_t executableIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : pipeline( pipeline_ )
- , executableIndex( executableIndex_ )
- {}
-
- PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineExecutableInfoKHR*>(this) = rhs;
- }
-
- PipelineExecutableInfoKHR& operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineExecutableInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
- const void* pNext = nullptr;
- vk::Pipeline pipeline;
- uint32_t executableIndex;
- };
- static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineExecutableInfoKHR : public layout::PipelineExecutableInfoKHR
- {
- VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( vk::Pipeline pipeline_ = vk::Pipeline(),
- uint32_t executableIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineExecutableInfoKHR( pipeline_, executableIndex_ )
+ VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
+ uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pipeline( pipeline_ )
+ , executableIndex( executableIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) - offsetof( PipelineExecutableInfoKHR, pNext ) );
+ return *this;
+ }
+
PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineExecutableInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineExecutableInfoKHR& operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineExecutableInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>(&rhs);
return *this;
}
@@ -55131,7 +52294,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineExecutableInfoKHR & setPipeline( vk::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
@@ -55166,108 +52329,46 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineExecutableInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+ uint32_t executableIndex = {};
};
static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
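// Usage sketch (assumptions: VK_KHR_pipeline_executable_properties is enabled
// and `pipeline` was created with vk::PipelineCreateFlagBits::eCaptureStatisticsKHR):
//
//   auto executables = device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR( pipeline ) );
//   for ( uint32_t i = 0; i < executables.size(); ++i )
//   {
//     vk::PipelineExecutableInfoKHR execInfo( pipeline, i );
//     auto statistics = device.getPipelineExecutableStatisticsKHR( execInfo );
//     // each statistic pairs a name/description with a typed value, see below
//   }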
- namespace layout
- {
- struct PipelineExecutableInternalRepresentationKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = { { 0 } },
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = { { 0 } },
- vk::Bool32 isText_ = 0,
- size_t dataSize_ = 0,
- void* pData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : name{}
- , description{}
- , isText( isText_ )
- , dataSize( dataSize_ )
- , pData( pData_ )
- {
- vk::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
- vk::ConstExpressionArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
- }
-
- PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>(this) = rhs;
- }
-
- PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
- void* pNext = nullptr;
- char name[VK_MAX_DESCRIPTION_SIZE];
- char description[VK_MAX_DESCRIPTION_SIZE];
- vk::Bool32 isText;
- size_t dataSize;
- void* pData;
- };
- static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineExecutableInternalRepresentationKHR : public layout::PipelineExecutableInternalRepresentationKHR
+ struct PipelineExecutableInternalRepresentationKHR
{
- VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = { { 0 } },
- std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = { { 0 } },
- vk::Bool32 isText_ = 0,
- size_t dataSize_ = 0,
- void* pData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineExecutableInternalRepresentationKHR( name_, description_, isText_, dataSize_, pData_ )
- {}
-
- PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineExecutableInternalRepresentationKHR( rhs )
- {}
-
- PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- layout::PipelineExecutableInternalRepresentationKHR::operator=(rhs);
- return *this;
- }
-
- PipelineExecutableInternalRepresentationKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
- {
- pNext = pNext_;
- return *this;
- }
-
- PipelineExecutableInternalRepresentationKHR & setName( std::array<char,VK_MAX_DESCRIPTION_SIZE> name_ ) VULKAN_HPP_NOEXCEPT
- {
- memcpy( name, name_.data(), VK_MAX_DESCRIPTION_SIZE * sizeof( char ) );
- return *this;
- }
-
- PipelineExecutableInternalRepresentationKHR & setDescription( std::array<char,VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 isText_ = {},
+ size_t dataSize_ = {},
+ void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : name{}
+ , description{}
+ , isText( isText_ )
+ , dataSize( dataSize_ )
+ , pData( pData_ )
{
- memcpy( description, description_.data(), VK_MAX_DESCRIPTION_SIZE * sizeof( char ) );
- return *this;
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
}
- PipelineExecutableInternalRepresentationKHR & setIsText( vk::Bool32 isText_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- isText = isText_;
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) - offsetof( PipelineExecutableInternalRepresentationKHR, pNext ) );
return *this;
}
- PipelineExecutableInternalRepresentationKHR & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- dataSize = dataSize_;
- return *this;
+ *this = rhs;
}
- PipelineExecutableInternalRepresentationKHR & setPData( void* pData_ ) VULKAN_HPP_NOEXCEPT
+ PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- pData = pData_;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>(&rhs);
return *this;
}
@@ -55297,55 +52398,47 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineExecutableInternalRepresentationKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+ void* pNext = {};
+ char name[VK_MAX_DESCRIPTION_SIZE] = {};
+ char description[VK_MAX_DESCRIPTION_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::Bool32 isText = {};
+ size_t dataSize = {};
+ void* pData = {};
};
static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineExecutablePropertiesKHR
{
- struct PipelineExecutablePropertiesKHR
+ PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
+ uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : stages( stages_ )
+ , name{}
+ , description{}
+ , subgroupSize( subgroupSize_ )
{
- protected:
- PipelineExecutablePropertiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>(this) = rhs;
- }
-
- PipelineExecutablePropertiesKHR& operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
- void* pNext = nullptr;
- vk::ShaderStageFlags stages;
- char name[VK_MAX_DESCRIPTION_SIZE];
- char description[VK_MAX_DESCRIPTION_SIZE];
- uint32_t subgroupSize;
- };
- static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+ }
- struct PipelineExecutablePropertiesKHR : public layout::PipelineExecutablePropertiesKHR
- {
- PipelineExecutablePropertiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::PipelineExecutablePropertiesKHR()
- {}
+ VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) - offsetof( PipelineExecutablePropertiesKHR, pNext ) );
+ return *this;
+ }
PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineExecutablePropertiesKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineExecutablePropertiesKHR& operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineExecutablePropertiesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>(&rhs);
return *this;
}
@@ -55374,14 +52467,25 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineExecutablePropertiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
+ char name[VK_MAX_DESCRIPTION_SIZE] = {};
+ char description[VK_MAX_DESCRIPTION_SIZE] = {};
+ uint32_t subgroupSize = {};
};
static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
union PipelineExecutableStatisticValueKHR
{
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( this, &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
+ return *this;
+ }
+
operator VkPipelineExecutableStatisticValueKHR const&() const
{
return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>(this);
@@ -55393,7 +52497,7 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
- vk::Bool32 b32;
+ VULKAN_HPP_NAMESPACE::Bool32 b32;
int64_t i64;
uint64_t u64;
double f64;
@@ -55405,49 +52509,35 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
};
- namespace layout
+ struct PipelineExecutableStatisticKHR
{
- struct PipelineExecutableStatisticKHR
+ PipelineExecutableStatisticKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
+ std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) VULKAN_HPP_NOEXCEPT
+ : name{}
+ , description{}
+ , format( format_ )
+ , value( value_ )
{
- protected:
- PipelineExecutableStatisticKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineExecutableStatisticKHR*>(this) = rhs;
- }
-
- PipelineExecutableStatisticKHR& operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineExecutableStatisticKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
- void* pNext = nullptr;
- char name[VK_MAX_DESCRIPTION_SIZE];
- char description[VK_MAX_DESCRIPTION_SIZE];
- vk::PipelineExecutableStatisticFormatKHR format;
- vk::PipelineExecutableStatisticValueKHR value;
- };
- static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "layout struct and wrapper have different size!" );
- }
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+ }
- struct PipelineExecutableStatisticKHR : public layout::PipelineExecutableStatisticKHR
- {
- PipelineExecutableStatisticKHR() VULKAN_HPP_NOEXCEPT
- : layout::PipelineExecutableStatisticKHR()
- {}
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) - offsetof( PipelineExecutableStatisticKHR, pNext ) );
+ return *this;
+ }
PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineExecutableStatisticKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineExecutableStatisticKHR& operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineExecutableStatisticKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>(&rhs);
return *this;
}
@@ -55461,53 +52551,37 @@ namespace VULKAN_HPP_NAMESPACE
return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
}
- private:
- using layout::PipelineExecutableStatisticKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+ void* pNext = {};
+ char name[VK_MAX_DESCRIPTION_SIZE] = {};
+ char description[VK_MAX_DESCRIPTION_SIZE] = {};
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+ VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
};
static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
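
`PipelineExecutableStatisticKHR::value` is a union, discriminated by the `format` member beside it, so a reader has to branch on the format before touching a union member. A usage sketch, assuming this header is on the include path and the build has `VULKAN_HPP_HAS_UNRESTRICTED_UNIONS` in effect so the members are directly accessible:

    #include <vulkan/vulkan.hpp>
    #include <cstdio>

    void printStatistic( vk::PipelineExecutableStatisticKHR const & stat )
    {
        // format tells us which union member of `value` is live.
        switch ( stat.format )
        {
            case vk::PipelineExecutableStatisticFormatKHR::eBool32:
                std::printf( "%s: %s\n", stat.name, stat.value.b32 ? "true" : "false" );
                break;
            case vk::PipelineExecutableStatisticFormatKHR::eInt64:
                std::printf( "%s: %lld\n", stat.name, static_cast<long long>( stat.value.i64 ) );
                break;
            case vk::PipelineExecutableStatisticFormatKHR::eUint64:
                std::printf( "%s: %llu\n", stat.name, static_cast<unsigned long long>( stat.value.u64 ) );
                break;
            case vk::PipelineExecutableStatisticFormatKHR::eFloat64:
                std::printf( "%s: %g\n", stat.name, stat.value.f64 );
                break;
        }
    }
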
- namespace layout
+ struct PipelineInfoKHR
{
- struct PipelineInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineInfoKHR( vk::Pipeline pipeline_ = vk::Pipeline() ) VULKAN_HPP_NOEXCEPT
- : pipeline( pipeline_ )
- {}
-
- PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineInfoKHR*>(this) = rhs;
- }
-
- PipelineInfoKHR& operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineInfoKHR;
- const void* pNext = nullptr;
- vk::Pipeline pipeline;
- };
- static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineInfoKHR : public layout::PipelineInfoKHR
- {
- VULKAN_HPP_CONSTEXPR PipelineInfoKHR( vk::Pipeline pipeline_ = vk::Pipeline() ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineInfoKHR( pipeline_ )
+ VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
+ : pipeline( pipeline_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) - offsetof( PipelineInfoKHR, pNext ) );
+ return *this;
+ }
+
PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineInfoKHR& operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>(&rhs);
return *this;
}
@@ -55517,7 +52591,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineInfoKHR & setPipeline( vk::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+ PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
{
pipeline = pipeline_;
return *this;
@@ -55545,17 +52619,19 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
};
static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
struct PushConstantRange
{
- VULKAN_HPP_CONSTEXPR PushConstantRange( vk::ShaderStageFlags stageFlags_ = vk::ShaderStageFlags(),
- uint32_t offset_ = 0,
- uint32_t size_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+ uint32_t offset_ = {},
+ uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
: stageFlags( stageFlags_ )
, offset( offset_ )
, size( size_ )
@@ -55563,16 +52639,16 @@ namespace VULKAN_HPP_NAMESPACE
PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPushConstantRange*>(this) = rhs;
+ *this = rhs;
}
PushConstantRange& operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPushConstantRange*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>(&rhs);
return *this;
}
- PushConstantRange & setStageFlags( vk::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+ PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
{
stageFlags = stageFlags_;
return *this;
@@ -55613,70 +52689,41 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ShaderStageFlags stageFlags;
- uint32_t offset;
- uint32_t size;
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+ uint32_t offset = {};
+ uint32_t size = {};
};
static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineLayoutCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags flags_ = vk::PipelineLayoutCreateFlags(),
- uint32_t setLayoutCount_ = 0,
- const vk::DescriptorSetLayout* pSetLayouts_ = nullptr,
- uint32_t pushConstantRangeCount_ = 0,
- const vk::PushConstantRange* pPushConstantRanges_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , setLayoutCount( setLayoutCount_ )
- , pSetLayouts( pSetLayouts_ )
- , pushConstantRangeCount( pushConstantRangeCount_ )
- , pPushConstantRanges( pPushConstantRanges_ )
- {}
-
- PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineLayoutCreateInfo*>(this) = rhs;
- }
-
- PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineLayoutCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
- const void* pNext = nullptr;
- vk::PipelineLayoutCreateFlags flags;
- uint32_t setLayoutCount;
- const vk::DescriptorSetLayout* pSetLayouts;
- uint32_t pushConstantRangeCount;
- const vk::PushConstantRange* pPushConstantRanges;
- };
- static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineLayoutCreateInfo : public layout::PipelineLayoutCreateInfo
+ struct PipelineLayoutCreateInfo
{
- VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( vk::PipelineLayoutCreateFlags flags_ = vk::PipelineLayoutCreateFlags(),
- uint32_t setLayoutCount_ = 0,
- const vk::DescriptorSetLayout* pSetLayouts_ = nullptr,
- uint32_t pushConstantRangeCount_ = 0,
- const vk::PushConstantRange* pPushConstantRanges_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineLayoutCreateInfo( flags_, setLayoutCount_, pSetLayouts_, pushConstantRangeCount_, pPushConstantRanges_ )
+ VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {},
+ uint32_t setLayoutCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {},
+ uint32_t pushConstantRangeCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , setLayoutCount( setLayoutCount_ )
+ , pSetLayouts( pSetLayouts_ )
+ , pushConstantRangeCount( pushConstantRangeCount_ )
+ , pPushConstantRanges( pPushConstantRanges_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) - offsetof( PipelineLayoutCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineLayoutCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineLayoutCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>(&rhs);
return *this;
}
@@ -55686,7 +52733,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineLayoutCreateInfo & setFlags( vk::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -55698,7 +52745,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineLayoutCreateInfo & setPSetLayouts( const vk::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
{
pSetLayouts = pSetLayouts_;
return *this;
@@ -55710,7 +52757,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineLayoutCreateInfo & setPPushConstantRanges( const vk::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+ PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
{
pPushConstantRanges = pPushConstantRanges_;
return *this;
@@ -55742,61 +52789,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineLayoutCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
+ uint32_t setLayoutCount = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+ uint32_t pushConstantRangeCount = {};
+ const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {};
};
static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
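
The generated `set...` members each write one field and return `*this`, so a create-info can be assembled in a single fluent expression. A sketch, where `setLayout` and `pushRange` stand for objects the application created earlier:

    #include <vulkan/vulkan.hpp>

    vk::PipelineLayoutCreateInfo makePipelineLayoutInfo( vk::DescriptorSetLayout const & setLayout,
                                                         vk::PushConstantRange const & pushRange )
    {
        // Each setter assigns one member and returns the struct by reference,
        // so the whole create-info is built in one expression.
        return vk::PipelineLayoutCreateInfo{}
            .setSetLayoutCount( 1 )
            .setPSetLayouts( &setLayout )
            .setPushConstantRangeCount( 1 )
            .setPPushConstantRanges( &pushRange );
    }

Note the returned struct only stores the addresses of `setLayout` and `pushRange`; both have to outlive the `createPipelineLayout` call that consumes it.
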
- namespace layout
- {
- struct PipelineRasterizationConservativeStateCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( vk::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = vk::PipelineRasterizationConservativeStateCreateFlagsEXT(),
- vk::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = vk::ConservativeRasterizationModeEXT::eDisabled,
- float extraPrimitiveOverestimationSize_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , conservativeRasterizationMode( conservativeRasterizationMode_ )
- , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
- {}
-
- PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
- const void* pNext = nullptr;
- vk::PipelineRasterizationConservativeStateCreateFlagsEXT flags;
- vk::ConservativeRasterizationModeEXT conservativeRasterizationMode;
- float extraPrimitiveOverestimationSize;
- };
- static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineRasterizationConservativeStateCreateInfoEXT : public layout::PipelineRasterizationConservativeStateCreateInfoEXT
+ struct PipelineRasterizationConservativeStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( vk::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = vk::PipelineRasterizationConservativeStateCreateFlagsEXT(),
- vk::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = vk::ConservativeRasterizationModeEXT::eDisabled,
- float extraPrimitiveOverestimationSize_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationConservativeStateCreateInfoEXT( flags_, conservativeRasterizationMode_, extraPrimitiveOverestimationSize_ )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled,
+ float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , conservativeRasterizationMode( conservativeRasterizationMode_ )
+ , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) - offsetof( PipelineRasterizationConservativeStateCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationConservativeStateCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineRasterizationConservativeStateCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -55806,13 +52834,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( vk::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( vk::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
{
conservativeRasterizationMode = conservativeRasterizationMode_;
return *this;
@@ -55848,57 +52876,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineRasterizationConservativeStateCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
+ float extraPrimitiveOverestimationSize = {};
};
static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
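
This struct is not passed to any call on its own; it extends `PipelineRasterizationStateCreateInfo` through the `pNext` chain. A sketch of wiring that up, assuming a device created with the VK_EXT_conservative_rasterization extension enabled:

    #include <vulkan/vulkan.hpp>

    vk::PipelineRasterizationStateCreateInfo makeRasterState(
        vk::PipelineRasterizationConservativeStateCreateInfoEXT & conservative )
    {
        conservative.conservativeRasterizationMode =
            vk::ConservativeRasterizationModeEXT::eOverestimate;

        vk::PipelineRasterizationStateCreateInfo rasterState{};
        rasterState.pNext     = &conservative; // chain the EXT struct in
        rasterState.lineWidth = 1.0f;
        return rasterState;
    }

`conservative` is taken by reference because the chained struct must stay alive until pipeline creation reads it.
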
- namespace layout
+ struct PipelineRasterizationDepthClipStateCreateInfoEXT
{
- struct PipelineRasterizationDepthClipStateCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( vk::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = vk::PipelineRasterizationDepthClipStateCreateFlagsEXT(),
- vk::Bool32 depthClipEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , depthClipEnable( depthClipEnable_ )
- {}
-
- PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineRasterizationDepthClipStateCreateInfoEXT& operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
- const void* pNext = nullptr;
- vk::PipelineRasterizationDepthClipStateCreateFlagsEXT flags;
- vk::Bool32 depthClipEnable;
- };
- static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineRasterizationDepthClipStateCreateInfoEXT : public layout::PipelineRasterizationDepthClipStateCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( vk::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = vk::PipelineRasterizationDepthClipStateCreateFlagsEXT(),
- vk::Bool32 depthClipEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationDepthClipStateCreateInfoEXT( flags_, depthClipEnable_ )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , depthClipEnable( depthClipEnable_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) - offsetof( PipelineRasterizationDepthClipStateCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationDepthClipStateCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineRasterizationDepthClipStateCreateInfoEXT& operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineRasterizationDepthClipStateCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -55908,13 +52917,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( vk::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( vk::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
{
depthClipEnable = depthClipEnable_;
return *this;
@@ -55943,65 +52952,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineRasterizationDepthClipStateCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
+ VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
};
static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineRasterizationLineStateCreateInfoEXT
{
- struct PipelineRasterizationLineStateCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( vk::LineRasterizationModeEXT lineRasterizationMode_ = vk::LineRasterizationModeEXT::eDefault,
- vk::Bool32 stippledLineEnable_ = 0,
- uint32_t lineStippleFactor_ = 0,
- uint16_t lineStipplePattern_ = 0 ) VULKAN_HPP_NOEXCEPT
- : lineRasterizationMode( lineRasterizationMode_ )
- , stippledLineEnable( stippledLineEnable_ )
- , lineStippleFactor( lineStippleFactor_ )
- , lineStipplePattern( lineStipplePattern_ )
- {}
-
- PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineRasterizationLineStateCreateInfoEXT& operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
- const void* pNext = nullptr;
- vk::LineRasterizationModeEXT lineRasterizationMode;
- vk::Bool32 stippledLineEnable;
- uint32_t lineStippleFactor;
- uint16_t lineStipplePattern;
- };
- static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineRasterizationLineStateCreateInfoEXT : public layout::PipelineRasterizationLineStateCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( vk::LineRasterizationModeEXT lineRasterizationMode_ = vk::LineRasterizationModeEXT::eDefault,
- vk::Bool32 stippledLineEnable_ = 0,
- uint32_t lineStippleFactor_ = 0,
- uint16_t lineStipplePattern_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationLineStateCreateInfoEXT( lineRasterizationMode_, stippledLineEnable_, lineStippleFactor_, lineStipplePattern_ )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault,
+ VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {},
+ uint32_t lineStippleFactor_ = {},
+ uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT
+ : lineRasterizationMode( lineRasterizationMode_ )
+ , stippledLineEnable( stippledLineEnable_ )
+ , lineStippleFactor( lineStippleFactor_ )
+ , lineStipplePattern( lineStipplePattern_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) - offsetof( PipelineRasterizationLineStateCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationLineStateCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineRasterizationLineStateCreateInfoEXT& operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineRasterizationLineStateCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -56011,13 +52996,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( vk::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
{
lineRasterizationMode = lineRasterizationMode_;
return *this;
}
- PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( vk::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
{
stippledLineEnable = stippledLineEnable_;
return *this;
@@ -56060,53 +53045,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineRasterizationLineStateCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
+ VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
+ uint32_t lineStippleFactor = {};
+ uint16_t lineStipplePattern = {};
};
static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineRasterizationStateRasterizationOrderAMD
{
- struct PipelineRasterizationStateRasterizationOrderAMD
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( vk::RasterizationOrderAMD rasterizationOrder_ = vk::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
- : rasterizationOrder( rasterizationOrder_ )
- {}
-
- PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>(this) = rhs;
- }
-
- PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
- const void* pNext = nullptr;
- vk::RasterizationOrderAMD rasterizationOrder;
- };
- static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineRasterizationStateRasterizationOrderAMD : public layout::PipelineRasterizationStateRasterizationOrderAMD
- {
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( vk::RasterizationOrderAMD rasterizationOrder_ = vk::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationStateRasterizationOrderAMD( rasterizationOrder_ )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
+ : rasterizationOrder( rasterizationOrder_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) - offsetof( PipelineRasterizationStateRasterizationOrderAMD, pNext ) );
+ return *this;
+ }
+
PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationStateRasterizationOrderAMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineRasterizationStateRasterizationOrderAMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>(&rhs);
return *this;
}
@@ -56116,7 +53085,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( vk::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
{
rasterizationOrder = rasterizationOrder_;
return *this;
@@ -56144,57 +53113,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineRasterizationStateRasterizationOrderAMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
};
static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineRasterizationStateStreamCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( vk::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = vk::PipelineRasterizationStateStreamCreateFlagsEXT(),
- uint32_t rasterizationStream_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , rasterizationStream( rasterizationStream_ )
- {}
-
- PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
- const void* pNext = nullptr;
- vk::PipelineRasterizationStateStreamCreateFlagsEXT flags;
- uint32_t rasterizationStream;
- };
- static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineRasterizationStateStreamCreateInfoEXT : public layout::PipelineRasterizationStateStreamCreateInfoEXT
+ struct PipelineRasterizationStateStreamCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( vk::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = vk::PipelineRasterizationStateStreamCreateFlagsEXT(),
- uint32_t rasterizationStream_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationStateStreamCreateInfoEXT( flags_, rasterizationStream_ )
+ VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {},
+ uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , rasterizationStream( rasterizationStream_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) - offsetof( PipelineRasterizationStateStreamCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRasterizationStateStreamCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineRasterizationStateStreamCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -56204,7 +53152,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineRasterizationStateStreamCreateInfoEXT & setFlags( vk::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -56239,53 +53187,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineRasterizationStateStreamCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {};
+ uint32_t rasterizationStream = {};
};
static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineRepresentativeFragmentTestStateCreateInfoNV
{
- struct PipelineRepresentativeFragmentTestStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( vk::Bool32 representativeFragmentTestEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
- {}
-
- PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
- const void* pNext = nullptr;
- vk::Bool32 representativeFragmentTestEnable;
- };
- static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineRepresentativeFragmentTestStateCreateInfoNV : public layout::PipelineRepresentativeFragmentTestStateCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( vk::Bool32 representativeFragmentTestEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRepresentativeFragmentTestStateCreateInfoNV( representativeFragmentTestEnable_ )
+ VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+ : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) - offsetof( PipelineRepresentativeFragmentTestStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineRepresentativeFragmentTestStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineRepresentativeFragmentTestStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -56295,7 +53225,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( vk::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
{
representativeFragmentTestEnable = representativeFragmentTestEnable_;
return *this;
@@ -56323,57 +53253,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineRepresentativeFragmentTestStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {};
};
static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineSampleLocationsStateCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( vk::Bool32 sampleLocationsEnable_ = 0,
- vk::SampleLocationsInfoEXT sampleLocationsInfo_ = vk::SampleLocationsInfoEXT() ) VULKAN_HPP_NOEXCEPT
- : sampleLocationsEnable( sampleLocationsEnable_ )
- , sampleLocationsInfo( sampleLocationsInfo_ )
- {}
-
- PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
- const void* pNext = nullptr;
- vk::Bool32 sampleLocationsEnable;
- vk::SampleLocationsInfoEXT sampleLocationsInfo;
- };
- static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineSampleLocationsStateCreateInfoEXT : public layout::PipelineSampleLocationsStateCreateInfoEXT
+ struct PipelineSampleLocationsStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( vk::Bool32 sampleLocationsEnable_ = 0,
- vk::SampleLocationsInfoEXT sampleLocationsInfo_ = vk::SampleLocationsInfoEXT() ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineSampleLocationsStateCreateInfoEXT( sampleLocationsEnable_, sampleLocationsInfo_ )
+ VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {},
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+ : sampleLocationsEnable( sampleLocationsEnable_ )
+ , sampleLocationsInfo( sampleLocationsInfo_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) - offsetof( PipelineSampleLocationsStateCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineSampleLocationsStateCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineSampleLocationsStateCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -56383,13 +53292,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( vk::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsEnable = sampleLocationsEnable_;
return *this;
}
- PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( vk::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
@@ -56418,52 +53327,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineSampleLocationsStateCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {};
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT
{
- struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT
- {
- protected:
- PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT() VULKAN_HPP_NOEXCEPT
- {}
-
- PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
- void* pNext = nullptr;
- uint32_t requiredSubgroupSize;
- };
- static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : public layout::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT
- {
- PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT() VULKAN_HPP_NOEXCEPT
- : layout::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT()
+ PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : requiredSubgroupSize( requiredSubgroupSize_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) - offsetof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -56489,53 +53381,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+ void* pNext = {};
+ uint32_t requiredSubgroupSize = {};
};
static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineTessellationDomainOriginStateCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( vk::TessellationDomainOrigin domainOrigin_ = vk::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT
- : domainOrigin( domainOrigin_ )
- {}
-
- PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>(this) = rhs;
- }
-
- PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
- const void* pNext = nullptr;
- vk::TessellationDomainOrigin domainOrigin;
- };
- static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineTessellationDomainOriginStateCreateInfo : public layout::PipelineTessellationDomainOriginStateCreateInfo
+ struct PipelineTessellationDomainOriginStateCreateInfo
{
- VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( vk::TessellationDomainOrigin domainOrigin_ = vk::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineTessellationDomainOriginStateCreateInfo( domainOrigin_ )
+ VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT
+ : domainOrigin( domainOrigin_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo & operator=( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) - offsetof( PipelineTessellationDomainOriginStateCreateInfo, pNext ) );
+ return *this;
+ }
+
PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineTessellationDomainOriginStateCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineTessellationDomainOriginStateCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>(&rhs);
return *this;
}
@@ -56545,7 +53418,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( vk::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
+ PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
{
domainOrigin = domainOrigin_;
return *this;
@@ -56573,28 +53446,30 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineTessellationDomainOriginStateCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
};
static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
struct VertexInputBindingDivisorDescriptionEXT
{
- VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = 0,
- uint32_t divisor_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {},
+ uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
: binding( binding_ )
, divisor( divisor_ )
{}
VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>(this) = rhs;
+ *this = rhs;
}
VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>(&rhs);
return *this;
}
@@ -56632,57 +53507,34 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t binding;
- uint32_t divisor;
+ uint32_t binding = {};
+ uint32_t divisor = {};
};
static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineVertexInputDivisorStateCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = 0,
- const vk::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
- , pVertexBindingDivisors( pVertexBindingDivisors_ )
- {}
-
- PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>(this) = rhs;
- }
-
- PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
- const void* pNext = nullptr;
- uint32_t vertexBindingDivisorCount;
- const vk::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors;
- };
- static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineVertexInputDivisorStateCreateInfoEXT : public layout::PipelineVertexInputDivisorStateCreateInfoEXT
+ struct PipelineVertexInputDivisorStateCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = 0,
- const vk::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineVertexInputDivisorStateCreateInfoEXT( vertexBindingDivisorCount_, pVertexBindingDivisors_ )
+ VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = {},
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {} ) VULKAN_HPP_NOEXCEPT
+ : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
+ , pVertexBindingDivisors( pVertexBindingDivisors_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) - offsetof( PipelineVertexInputDivisorStateCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineVertexInputDivisorStateCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineVertexInputDivisorStateCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -56698,7 +53550,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const vk::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
+ PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
{
pVertexBindingDivisors = pVertexBindingDivisors_;
return *this;
@@ -56727,61 +53579,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineVertexInputDivisorStateCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+ const void* pNext = {};
+ uint32_t vertexBindingDivisorCount = {};
+ const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors = {};
};
static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineVertexInputDivisorStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( vk::CoarseSampleOrderTypeNV sampleOrderType_ = vk::CoarseSampleOrderTypeNV::eDefault,
- uint32_t customSampleOrderCount_ = 0,
- const vk::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : sampleOrderType( sampleOrderType_ )
- , customSampleOrderCount( customSampleOrderCount_ )
- , pCustomSampleOrders( pCustomSampleOrders_ )
- {}
-
- PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
- const void* pNext = nullptr;
- vk::CoarseSampleOrderTypeNV sampleOrderType;
- uint32_t customSampleOrderCount;
- const vk::CoarseSampleOrderCustomNV* pCustomSampleOrders;
- };
- static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineViewportCoarseSampleOrderStateCreateInfoNV : public layout::PipelineViewportCoarseSampleOrderStateCreateInfoNV
+ struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( vk::CoarseSampleOrderTypeNV sampleOrderType_ = vk::CoarseSampleOrderTypeNV::eDefault,
- uint32_t customSampleOrderCount_ = 0,
- const vk::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportCoarseSampleOrderStateCreateInfoNV( sampleOrderType_, customSampleOrderCount_, pCustomSampleOrders_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault,
+ uint32_t customSampleOrderCount_ = {},
+ const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT
+ : sampleOrderType( sampleOrderType_ )
+ , customSampleOrderCount( customSampleOrderCount_ )
+ , pCustomSampleOrders( pCustomSampleOrders_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) - offsetof( PipelineViewportCoarseSampleOrderStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportCoarseSampleOrderStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineViewportCoarseSampleOrderStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -56791,7 +53621,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( vk::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
{
sampleOrderType = sampleOrderType_;
return *this;
@@ -56803,7 +53633,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const vk::CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
{
pCustomSampleOrders = pCustomSampleOrders_;
return *this;
@@ -56833,57 +53663,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineViewportCoarseSampleOrderStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
+ uint32_t customSampleOrderCount = {};
+ const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders = {};
};
static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineViewportExclusiveScissorStateCreateInfoNV
{
- struct PipelineViewportExclusiveScissorStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = 0,
- const vk::Rect2D* pExclusiveScissors_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : exclusiveScissorCount( exclusiveScissorCount_ )
- , pExclusiveScissors( pExclusiveScissors_ )
- {}
-
- PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
- const void* pNext = nullptr;
- uint32_t exclusiveScissorCount;
- const vk::Rect2D* pExclusiveScissors;
- };
- static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineViewportExclusiveScissorStateCreateInfoNV : public layout::PipelineViewportExclusiveScissorStateCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = 0,
- const vk::Rect2D* pExclusiveScissors_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportExclusiveScissorStateCreateInfoNV( exclusiveScissorCount_, pExclusiveScissors_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT
+ : exclusiveScissorCount( exclusiveScissorCount_ )
+ , pExclusiveScissors( pExclusiveScissors_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) - offsetof( PipelineViewportExclusiveScissorStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportExclusiveScissorStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineViewportExclusiveScissorStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -56899,7 +53710,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const vk::Rect2D* pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
{
pExclusiveScissors = pExclusiveScissors_;
return *this;
@@ -56928,28 +53739,31 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineViewportExclusiveScissorStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+ const void* pNext = {};
+ uint32_t exclusiveScissorCount = {};
+ const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors = {};
};
static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
struct ShadingRatePaletteNV
{
- VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = 0,
- const vk::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT
: shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ )
, pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
{}
ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkShadingRatePaletteNV*>(this) = rhs;
+ *this = rhs;
}
ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkShadingRatePaletteNV*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>(&rhs);
return *this;
}
@@ -56959,7 +53773,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ShadingRatePaletteNV & setPShadingRatePaletteEntries( const vk::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+ ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
{
pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
return *this;
@@ -56987,61 +53801,36 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t shadingRatePaletteEntryCount;
- const vk::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries;
+ uint32_t shadingRatePaletteEntryCount = {};
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
};
static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineViewportShadingRateImageStateCreateInfoNV
{
- struct PipelineViewportShadingRateImageStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( vk::Bool32 shadingRateImageEnable_ = 0,
- uint32_t viewportCount_ = 0,
- const vk::ShadingRatePaletteNV* pShadingRatePalettes_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : shadingRateImageEnable( shadingRateImageEnable_ )
- , viewportCount( viewportCount_ )
- , pShadingRatePalettes( pShadingRatePalettes_ )
- {}
-
- PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
- const void* pNext = nullptr;
- vk::Bool32 shadingRateImageEnable;
- uint32_t viewportCount;
- const vk::ShadingRatePaletteNV* pShadingRatePalettes;
- };
- static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineViewportShadingRateImageStateCreateInfoNV : public layout::PipelineViewportShadingRateImageStateCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( vk::Bool32 shadingRateImageEnable_ = 0,
- uint32_t viewportCount_ = 0,
- const vk::ShadingRatePaletteNV* pShadingRatePalettes_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportShadingRateImageStateCreateInfoNV( shadingRateImageEnable_, viewportCount_, pShadingRatePalettes_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {},
+ uint32_t viewportCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shadingRateImageEnable( shadingRateImageEnable_ )
+ , viewportCount( viewportCount_ )
+ , pShadingRatePalettes( pShadingRatePalettes_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) - offsetof( PipelineViewportShadingRateImageStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportShadingRateImageStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineViewportShadingRateImageStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -57051,7 +53840,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( vk::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateImageEnable = shadingRateImageEnable_;
return *this;
@@ -57063,7 +53852,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const vk::ShadingRatePaletteNV* pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
{
pShadingRatePalettes = pShadingRatePalettes_;
return *this;
@@ -57093,18 +53882,22 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineViewportShadingRateImageStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
+ uint32_t viewportCount = {};
+ const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes = {};
};
static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
struct ViewportSwizzleNV
{
- VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( vk::ViewportCoordinateSwizzleNV x_ = vk::ViewportCoordinateSwizzleNV::ePositiveX,
- vk::ViewportCoordinateSwizzleNV y_ = vk::ViewportCoordinateSwizzleNV::ePositiveX,
- vk::ViewportCoordinateSwizzleNV z_ = vk::ViewportCoordinateSwizzleNV::ePositiveX,
- vk::ViewportCoordinateSwizzleNV w_ = vk::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
: x( x_ )
, y( y_ )
, z( z_ )
@@ -57113,34 +53906,34 @@ namespace VULKAN_HPP_NAMESPACE
ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkViewportSwizzleNV*>(this) = rhs;
+ *this = rhs;
}
ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkViewportSwizzleNV*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>(&rhs);
return *this;
}
- ViewportSwizzleNV & setX( vk::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
+ ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
- ViewportSwizzleNV & setY( vk::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
+ ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
- ViewportSwizzleNV & setZ( vk::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
+ ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
{
z = z_;
return *this;
}
- ViewportSwizzleNV & setW( vk::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
+ ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
{
w = w_;
return *this;
@@ -57170,63 +53963,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ViewportCoordinateSwizzleNV x;
- vk::ViewportCoordinateSwizzleNV y;
- vk::ViewportCoordinateSwizzleNV z;
- vk::ViewportCoordinateSwizzleNV w;
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+ VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
};
static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PipelineViewportSwizzleStateCreateInfoNV
{
- struct PipelineViewportSwizzleStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( vk::PipelineViewportSwizzleStateCreateFlagsNV flags_ = vk::PipelineViewportSwizzleStateCreateFlagsNV(),
- uint32_t viewportCount_ = 0,
- const vk::ViewportSwizzleNV* pViewportSwizzles_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , viewportCount( viewportCount_ )
- , pViewportSwizzles( pViewportSwizzles_ )
- {}
-
- PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
- const void* pNext = nullptr;
- vk::PipelineViewportSwizzleStateCreateFlagsNV flags;
- uint32_t viewportCount;
- const vk::ViewportSwizzleNV* pViewportSwizzles;
- };
- static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineViewportSwizzleStateCreateInfoNV : public layout::PipelineViewportSwizzleStateCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( vk::PipelineViewportSwizzleStateCreateFlagsNV flags_ = vk::PipelineViewportSwizzleStateCreateFlagsNV(),
- uint32_t viewportCount_ = 0,
- const vk::ViewportSwizzleNV* pViewportSwizzles_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportSwizzleStateCreateInfoNV( flags_, viewportCount_, pViewportSwizzles_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {},
+ uint32_t viewportCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , viewportCount( viewportCount_ )
+ , pViewportSwizzles( pViewportSwizzles_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) - offsetof( PipelineViewportSwizzleStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportSwizzleStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineViewportSwizzleStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -57236,7 +54004,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportSwizzleStateCreateInfoNV & setFlags( vk::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -57248,7 +54016,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const vk::ViewportSwizzleNV* pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
{
pViewportSwizzles = pViewportSwizzles_;
return *this;
@@ -57278,28 +54046,32 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineViewportSwizzleStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
+ uint32_t viewportCount = {};
+ const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles = {};
};
static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
struct ViewportWScalingNV
{
- VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = 0,
- float ycoeff_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {},
+ float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
: xcoeff( xcoeff_ )
, ycoeff( ycoeff_ )
{}
ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkViewportWScalingNV*>(this) = rhs;
+ *this = rhs;
}
ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkViewportWScalingNV*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>(&rhs);
return *this;
}
@@ -57337,61 +54109,36 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- float xcoeff;
- float ycoeff;
+ float xcoeff = {};
+ float ycoeff = {};
};
static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct PipelineViewportWScalingStateCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( vk::Bool32 viewportWScalingEnable_ = 0,
- uint32_t viewportCount_ = 0,
- const vk::ViewportWScalingNV* pViewportWScalings_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : viewportWScalingEnable( viewportWScalingEnable_ )
- , viewportCount( viewportCount_ )
- , pViewportWScalings( pViewportWScalings_ )
- {}
-
- PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>(this) = rhs;
- }
-
- PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
- const void* pNext = nullptr;
- vk::Bool32 viewportWScalingEnable;
- uint32_t viewportCount;
- const vk::ViewportWScalingNV* pViewportWScalings;
- };
- static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct PipelineViewportWScalingStateCreateInfoNV : public layout::PipelineViewportWScalingStateCreateInfoNV
+ struct PipelineViewportWScalingStateCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( vk::Bool32 viewportWScalingEnable_ = 0,
- uint32_t viewportCount_ = 0,
- const vk::ViewportWScalingNV* pViewportWScalings_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportWScalingStateCreateInfoNV( viewportWScalingEnable_, viewportCount_, pViewportWScalings_ )
+ VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {},
+ uint32_t viewportCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT
+ : viewportWScalingEnable( viewportWScalingEnable_ )
+ , viewportCount( viewportCount_ )
+ , pViewportWScalings( pViewportWScalings_ )
{}
+ VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) - offsetof( PipelineViewportWScalingStateCreateInfoNV, pNext ) );
+ return *this;
+ }
+
PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PipelineViewportWScalingStateCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PipelineViewportWScalingStateCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>(&rhs);
return *this;
}
@@ -57401,7 +54148,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( vk::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
{
viewportWScalingEnable = viewportWScalingEnable_;
return *this;
@@ -57413,7 +54160,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const vk::ViewportWScalingNV* pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+ PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
{
pViewportWScalings = pViewportWScalings_;
return *this;
@@ -57443,55 +54190,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PipelineViewportWScalingStateCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
+ uint32_t viewportCount = {};
+ const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings = {};
};
static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_GGP
- namespace layout
+ struct PresentFrameTokenGGP
{
- struct PresentFrameTokenGGP
- {
- protected:
- VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = 0 ) VULKAN_HPP_NOEXCEPT
- : frameToken( frameToken_ )
- {}
-
- PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPresentFrameTokenGGP*>(this) = rhs;
- }
-
- PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPresentFrameTokenGGP*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePresentFrameTokenGGP;
- const void* pNext = nullptr;
- GgpFrameToken frameToken;
- };
- static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "layout struct and wrapper have different size!" );
- }
-
- struct PresentFrameTokenGGP : public layout::PresentFrameTokenGGP
- {
- VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::PresentFrameTokenGGP( frameToken_ )
+ VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT
+ : frameToken( frameToken_ )
{}
+ VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP & operator=( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) - offsetof( PresentFrameTokenGGP, pNext ) );
+ return *this;
+ }
+
PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PresentFrameTokenGGP( rhs )
- {}
+ {
+ *this = rhs;
+ }
PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PresentFrameTokenGGP::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>(&rhs);
return *this;
}
@@ -57529,74 +54259,45 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PresentFrameTokenGGP::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
+ const void* pNext = {};
+ GgpFrameToken frameToken = {};
};
static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_GGP*/
- namespace layout
- {
- struct PresentInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0,
- const vk::Semaphore* pWaitSemaphores_ = nullptr,
- uint32_t swapchainCount_ = 0,
- const vk::SwapchainKHR* pSwapchains_ = nullptr,
- const uint32_t* pImageIndices_ = nullptr,
- vk::Result* pResults_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphores( pWaitSemaphores_ )
- , swapchainCount( swapchainCount_ )
- , pSwapchains( pSwapchains_ )
- , pImageIndices( pImageIndices_ )
- , pResults( pResults_ )
- {}
-
- PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPresentInfoKHR*>(this) = rhs;
- }
-
- PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPresentInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePresentInfoKHR;
- const void* pNext = nullptr;
- uint32_t waitSemaphoreCount;
- const vk::Semaphore* pWaitSemaphores;
- uint32_t swapchainCount;
- const vk::SwapchainKHR* pSwapchains;
- const uint32_t* pImageIndices;
- vk::Result* pResults;
- };
- static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PresentInfoKHR : public layout::PresentInfoKHR
+ struct PresentInfoKHR
{
- VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0,
- const vk::Semaphore* pWaitSemaphores_ = nullptr,
- uint32_t swapchainCount_ = 0,
- const vk::SwapchainKHR* pSwapchains_ = nullptr,
- const uint32_t* pImageIndices_ = nullptr,
- vk::Result* pResults_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PresentInfoKHR( waitSemaphoreCount_, pWaitSemaphores_, swapchainCount_, pSwapchains_, pImageIndices_, pResults_ )
+ VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
+ uint32_t swapchainCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {},
+ const uint32_t* pImageIndices_ = {},
+ VULKAN_HPP_NAMESPACE::Result* pResults_ = {} ) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphores( pWaitSemaphores_ )
+ , swapchainCount( swapchainCount_ )
+ , pSwapchains( pSwapchains_ )
+ , pImageIndices( pImageIndices_ )
+ , pResults( pResults_ )
{}
+ VULKAN_HPP_NAMESPACE::PresentInfoKHR & operator=( VULKAN_HPP_NAMESPACE::PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) - offsetof( PresentInfoKHR, pNext ) );
+ return *this;
+ }
+
PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PresentInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PresentInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>(&rhs);
return *this;
}
@@ -57612,7 +54313,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PresentInfoKHR & setPWaitSemaphores( const vk::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
@@ -57624,7 +54325,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PresentInfoKHR & setPSwapchains( const vk::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
{
pSwapchains = pSwapchains_;
return *this;
@@ -57636,7 +54337,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PresentInfoKHR & setPResults( vk::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
+ PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
{
pResults = pResults_;
return *this;
@@ -57669,24 +54370,31 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PresentInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
+ const void* pNext = {};
+ uint32_t waitSemaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+ uint32_t swapchainCount = {};
+ const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {};
+ const uint32_t* pImageIndices = {};
+ VULKAN_HPP_NAMESPACE::Result* pResults = {};
};
static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
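Since sType is now const and every other member has a usable default, the setter chain (unchanged by this diff apart from the vk:: to VULKAN_HPP_NAMESPACE spelling) remains the natural way to fill in a present request. A hedged usage sketch, not taken from the diff; the helper name and its by-reference parameters are illustrative only:

#include <vulkan/vulkan.hpp>

// Illustrative helper: PresentInfoKHR stores raw pointers, so the caller must
// keep the semaphore, swapchain and image index alive until the present call.
vk::PresentInfoKHR makePresentInfo( vk::Semaphore const &    waitSemaphore,
                                    vk::SwapchainKHR const & swapchain,
                                    uint32_t const &         imageIndex )
{
  // Default construction now relies on the member initializers added above;
  // only the fields that matter for this request are set explicitly.
  return vk::PresentInfoKHR()
    .setWaitSemaphoreCount( 1 )
    .setPWaitSemaphores( &waitSemaphore )
    .setSwapchainCount( 1 )
    .setPSwapchains( &swapchain )
    .setPImageIndices( &imageIndex );
}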
struct RectLayerKHR
{
- VULKAN_HPP_CONSTEXPR RectLayerKHR( vk::Offset2D offset_ = vk::Offset2D(),
- vk::Extent2D extent_ = vk::Extent2D(),
- uint32_t layer_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D extent_ = {},
+ uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT
: offset( offset_ )
, extent( extent_ )
, layer( layer_ )
{}
explicit RectLayerKHR( Rect2D const& rect2D,
- uint32_t layer_ = 0 )
+ uint32_t layer_ = {} )
: offset( rect2D.offset )
, extent( rect2D.extent )
, layer( layer_ )
@@ -57694,22 +54402,22 @@ namespace VULKAN_HPP_NAMESPACE
RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkRectLayerKHR*>(this) = rhs;
+ *this = rhs;
}
RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkRectLayerKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>(&rhs);
return *this;
}
- RectLayerKHR & setOffset( vk::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
+ RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
- RectLayerKHR & setExtent( vk::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
+ RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
@@ -57744,29 +54452,29 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Offset2D offset;
- vk::Extent2D extent;
- uint32_t layer;
+ VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+ VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+ uint32_t layer = {};
};
static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
struct PresentRegionKHR
{
- VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = 0,
- const vk::RectLayerKHR* pRectangles_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {},
+ const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
: rectangleCount( rectangleCount_ )
, pRectangles( pRectangles_ )
{}
PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPresentRegionKHR*>(this) = rhs;
+ *this = rhs;
}
PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPresentRegionKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>(&rhs);
return *this;
}
@@ -57776,7 +54484,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PresentRegionKHR & setPRectangles( const vk::RectLayerKHR* pRectangles_ ) VULKAN_HPP_NOEXCEPT
+ PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ ) VULKAN_HPP_NOEXCEPT
{
pRectangles = pRectangles_;
return *this;
@@ -57804,57 +54512,34 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t rectangleCount;
- const vk::RectLayerKHR* pRectangles;
+ uint32_t rectangleCount = {};
+ const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles = {};
};
static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PresentRegionsKHR
{
- struct PresentRegionsKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = 0,
- const vk::PresentRegionKHR* pRegions_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
- , pRegions( pRegions_ )
- {}
-
- PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPresentRegionsKHR*>(this) = rhs;
- }
-
- PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPresentRegionsKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePresentRegionsKHR;
- const void* pNext = nullptr;
- uint32_t swapchainCount;
- const vk::PresentRegionKHR* pRegions;
- };
- static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct PresentRegionsKHR : public layout::PresentRegionsKHR
- {
- VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = 0,
- const vk::PresentRegionKHR* pRegions_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PresentRegionsKHR( swapchainCount_, pRegions_ )
+ VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
+ : swapchainCount( swapchainCount_ )
+ , pRegions( pRegions_ )
{}
+ VULKAN_HPP_NAMESPACE::PresentRegionsKHR & operator=( VULKAN_HPP_NAMESPACE::PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) - offsetof( PresentRegionsKHR, pNext ) );
+ return *this;
+ }
+
PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PresentRegionsKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PresentRegionsKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>(&rhs);
return *this;
}
@@ -57870,7 +54555,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PresentRegionsKHR & setPRegions( const vk::PresentRegionKHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+ PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
{
pRegions = pRegions_;
return *this;
@@ -57899,28 +54584,31 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PresentRegionsKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
+ const void* pNext = {};
+ uint32_t swapchainCount = {};
+ const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions = {};
};
static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
struct PresentTimeGOOGLE
{
- VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = 0,
- uint64_t desiredPresentTime_ = 0 ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {},
+ uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
: presentID( presentID_ )
, desiredPresentTime( desiredPresentTime_ )
{}
PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPresentTimeGOOGLE*>(this) = rhs;
+ *this = rhs;
}
PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkPresentTimeGOOGLE*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>(&rhs);
return *this;
}
@@ -57958,57 +54646,34 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t presentID;
- uint64_t desiredPresentTime;
+ uint32_t presentID = {};
+ uint64_t desiredPresentTime = {};
};
static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct PresentTimesInfoGOOGLE
{
- struct PresentTimesInfoGOOGLE
- {
- protected:
- VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0,
- const vk::PresentTimeGOOGLE* pTimes_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : swapchainCount( swapchainCount_ )
- , pTimes( pTimes_ )
- {}
-
- PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPresentTimesInfoGOOGLE*>(this) = rhs;
- }
-
- PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkPresentTimesInfoGOOGLE*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
- const void* pNext = nullptr;
- uint32_t swapchainCount;
- const vk::PresentTimeGOOGLE* pTimes;
- };
- static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "layout struct and wrapper have different size!" );
- }
-
- struct PresentTimesInfoGOOGLE : public layout::PresentTimesInfoGOOGLE
- {
- VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = 0,
- const vk::PresentTimeGOOGLE* pTimes_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::PresentTimesInfoGOOGLE( swapchainCount_, pTimes_ )
+ VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : swapchainCount( swapchainCount_ )
+ , pTimes( pTimes_ )
{}
+ VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE & operator=( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) - offsetof( PresentTimesInfoGOOGLE, pNext ) );
+ return *this;
+ }
+
PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::PresentTimesInfoGOOGLE( rhs )
- {}
+ {
+ *this = rhs;
+ }
PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::PresentTimesInfoGOOGLE::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>(&rhs);
return *this;
}
@@ -58024,7 +54689,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- PresentTimesInfoGOOGLE & setPTimes( const vk::PresentTimeGOOGLE* pTimes_ ) VULKAN_HPP_NOEXCEPT
+ PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ ) VULKAN_HPP_NOEXCEPT
{
pTimes = pTimes_;
return *this;
@@ -58053,53 +54718,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::PresentTimesInfoGOOGLE::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
+ const void* pNext = {};
+ uint32_t swapchainCount = {};
+ const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes = {};
};
static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct ProtectedSubmitInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( vk::Bool32 protectedSubmit_ = 0 ) VULKAN_HPP_NOEXCEPT
- : protectedSubmit( protectedSubmit_ )
- {}
-
- ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkProtectedSubmitInfo*>(this) = rhs;
- }
-
- ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkProtectedSubmitInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eProtectedSubmitInfo;
- const void* pNext = nullptr;
- vk::Bool32 protectedSubmit;
- };
- static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct ProtectedSubmitInfo : public layout::ProtectedSubmitInfo
+ struct ProtectedSubmitInfo
{
- VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( vk::Bool32 protectedSubmit_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::ProtectedSubmitInfo( protectedSubmit_ )
+ VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT
+ : protectedSubmit( protectedSubmit_ )
{}
+ VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) - offsetof( ProtectedSubmitInfo, pNext ) );
+ return *this;
+ }
+
ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ProtectedSubmitInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ProtectedSubmitInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>(&rhs);
return *this;
}
@@ -58109,7 +54756,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ProtectedSubmitInfo & setProtectedSubmit( vk::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
+ ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
{
protectedSubmit = protectedSubmit_;
return *this;
@@ -58137,65 +54784,40 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ProtectedSubmitInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
};
static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct QueryPoolCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( vk::QueryPoolCreateFlags flags_ = vk::QueryPoolCreateFlags(),
- vk::QueryType queryType_ = vk::QueryType::eOcclusion,
- uint32_t queryCount_ = 0,
- vk::QueryPipelineStatisticFlags pipelineStatistics_ = vk::QueryPipelineStatisticFlags() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , queryType( queryType_ )
- , queryCount( queryCount_ )
- , pipelineStatistics( pipelineStatistics_ )
- {}
-
- QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkQueryPoolCreateInfo*>(this) = rhs;
- }
-
- QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkQueryPoolCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eQueryPoolCreateInfo;
- const void* pNext = nullptr;
- vk::QueryPoolCreateFlags flags;
- vk::QueryType queryType;
- uint32_t queryCount;
- vk::QueryPipelineStatisticFlags pipelineStatistics;
- };
- static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct QueryPoolCreateInfo : public layout::QueryPoolCreateInfo
+ struct QueryPoolCreateInfo
{
- VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( vk::QueryPoolCreateFlags flags_ = vk::QueryPoolCreateFlags(),
- vk::QueryType queryType_ = vk::QueryType::eOcclusion,
- uint32_t queryCount_ = 0,
- vk::QueryPipelineStatisticFlags pipelineStatistics_ = vk::QueryPipelineStatisticFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::QueryPoolCreateInfo( flags_, queryType_, queryCount_, pipelineStatistics_ )
+ VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
+ uint32_t queryCount_ = {},
+ VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , queryType( queryType_ )
+ , queryCount( queryCount_ )
+ , pipelineStatistics( pipelineStatistics_ )
{}
+ VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & operator=( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) - offsetof( QueryPoolCreateInfo, pNext ) );
+ return *this;
+ }
+
QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::QueryPoolCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::QueryPoolCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>(&rhs);
return *this;
}
@@ -58205,13 +54827,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- QueryPoolCreateInfo & setFlags( vk::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- QueryPoolCreateInfo & setQueryType( vk::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
{
queryType = queryType_;
return *this;
@@ -58223,7 +54845,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- QueryPoolCreateInfo & setPipelineStatistics( vk::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
{
pipelineStatistics = pipelineStatistics_;
return *this;
@@ -58254,53 +54876,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::QueryPoolCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
+ uint32_t queryCount = {};
+ VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
};
static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
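Call sites are unaffected by the flattening, since the fluent setters survive. An illustrative sketch (makeOcclusionPool is a hypothetical helper; assumes a valid vk::Device and the default exceptions-enabled configuration of vulkan.hpp):

    #include <vulkan/vulkan.hpp>

    vk::QueryPool makeOcclusionPool( vk::Device device, uint32_t queryCount )
    {
        vk::QueryPoolCreateInfo info = vk::QueryPoolCreateInfo()
            .setQueryType( vk::QueryType::eOcclusion )
            .setQueryCount( queryCount );
        return device.createQueryPool( info );  // throws vk::SystemError on failure
    }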
- namespace layout
- {
- struct QueryPoolCreateInfoINTEL
- {
- protected:
- VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( vk::QueryPoolSamplingModeINTEL performanceCountersSampling_ = vk::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT
- : performanceCountersSampling( performanceCountersSampling_ )
- {}
-
- QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkQueryPoolCreateInfoINTEL*>(this) = rhs;
- }
-
- QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkQueryPoolCreateInfoINTEL*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL;
- const void* pNext = nullptr;
- vk::QueryPoolSamplingModeINTEL performanceCountersSampling;
- };
- static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "layout struct and wrapper have different size!" );
- }
-
- struct QueryPoolCreateInfoINTEL : public layout::QueryPoolCreateInfoINTEL
+ struct QueryPoolCreateInfoINTEL
{
- VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( vk::QueryPoolSamplingModeINTEL performanceCountersSampling_ = vk::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT
- : layout::QueryPoolCreateInfoINTEL( performanceCountersSampling_ )
+ VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT
+ : performanceCountersSampling( performanceCountersSampling_ )
{}
+ VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL & operator=( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL ) - offsetof( QueryPoolCreateInfoINTEL, pNext ) );
+ return *this;
+ }
+
QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::QueryPoolCreateInfoINTEL( rhs )
- {}
+ {
+ *this = rhs;
+ }
QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::QueryPoolCreateInfoINTEL::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL const *>(&rhs);
return *this;
}
@@ -58310,7 +54916,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- QueryPoolCreateInfoINTEL & setPerformanceCountersSampling( vk::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
+ QueryPoolCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
{
performanceCountersSampling = performanceCountersSampling_;
return *this;
@@ -58338,52 +54944,119 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::QueryPoolCreateInfoINTEL::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
};
static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<QueryPoolCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct QueryPoolPerformanceCreateInfoKHR
{
- struct QueueFamilyCheckpointPropertiesNV
+ VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {},
+ uint32_t counterIndexCount_ = {},
+ const uint32_t* pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+ : queueFamilyIndex( queueFamilyIndex_ )
+ , counterIndexCount( counterIndexCount_ )
+ , pCounterIndices( pCounterIndices_ )
+ {}
+
+ VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- QueueFamilyCheckpointPropertiesNV() VULKAN_HPP_NOEXCEPT
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) - offsetof( QueryPoolPerformanceCreateInfoKHR, pNext ) );
+ return *this;
+ }
- QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>(this) = rhs;
- }
+ QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>(this) = rhs;
- return *this;
- }
+ QueryPoolPerformanceCreateInfoKHR& operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>(&rhs);
+ return *this;
+ }
- public:
- vk::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
- void* pNext = nullptr;
- vk::PipelineStageFlags checkpointExecutionStageMask;
- };
- static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "layout struct and wrapper have different size!" );
- }
+ QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pNext = pNext_;
+ return *this;
+ }
+
+ QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+ {
+ queueFamilyIndex = queueFamilyIndex_;
+ return *this;
+ }
- struct QueueFamilyCheckpointPropertiesNV : public layout::QueueFamilyCheckpointPropertiesNV
+ QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
+ {
+ counterIndexCount = counterIndexCount_;
+ return *this;
+ }
+
+ QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
+ {
+ pCounterIndices = pCounterIndices_;
+ return *this;
+ }
+
+ operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
+ }
+
+ operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ {
+ return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
+ }
+
+ bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return ( sType == rhs.sType )
+ && ( pNext == rhs.pNext )
+ && ( queueFamilyIndex == rhs.queueFamilyIndex )
+ && ( counterIndexCount == rhs.counterIndexCount )
+ && ( pCounterIndices == rhs.pCounterIndices );
+ }
+
+ bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ {
+ return !operator==( rhs );
+ }
+
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
+ const void* pNext = {};
+ uint32_t queueFamilyIndex = {};
+ uint32_t counterIndexCount = {};
+ const uint32_t* pCounterIndices = {};
+ };
+ static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
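QueryPoolPerformanceCreateInfoKHR is newly added here (VK_KHR_performance_query) and is consumed through QueryPoolCreateInfo::pNext rather than on its own. A sketch, assuming the extension is enabled and counterIndices came from enumerateQueueFamilyPerformanceQueryCountersKHR (makePerformancePool is a hypothetical helper):

    #include <vulkan/vulkan.hpp>
    #include <vector>

    vk::QueryPool makePerformancePool( vk::Device device, uint32_t queueFamilyIndex,
                                       std::vector<uint32_t> const & counterIndices )
    {
        vk::QueryPoolPerformanceCreateInfoKHR perfInfo = vk::QueryPoolPerformanceCreateInfoKHR()
            .setQueueFamilyIndex( queueFamilyIndex )
            .setCounterIndexCount( static_cast<uint32_t>( counterIndices.size() ) )
            .setPCounterIndices( counterIndices.data() );

        vk::QueryPoolCreateInfo info = vk::QueryPoolCreateInfo()
            .setPNext( &perfInfo )  // extension struct rides on the pNext chain
            .setQueryType( vk::QueryType::ePerformanceQueryKHR )
            .setQueryCount( 1 );
        return device.createQueryPool( info );
    }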
+
+ struct QueueFamilyCheckpointPropertiesNV
{
- QueueFamilyCheckpointPropertiesNV() VULKAN_HPP_NOEXCEPT
- : layout::QueueFamilyCheckpointPropertiesNV()
+ QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT
+ : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
{}
+ VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV & operator=( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) - offsetof( QueueFamilyCheckpointPropertiesNV, pNext ) );
+ return *this;
+ }
+
QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::QueueFamilyCheckpointPropertiesNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::QueueFamilyCheckpointPropertiesNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>(&rhs);
return *this;
}
@@ -58409,25 +55082,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::QueueFamilyCheckpointPropertiesNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
};
static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
struct QueueFamilyProperties
{
- QueueFamilyProperties() VULKAN_HPP_NOEXCEPT
+ QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {},
+ uint32_t queueCount_ = {},
+ uint32_t timestampValidBits_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
+ : queueFlags( queueFlags_ )
+ , queueCount( queueCount_ )
+ , timestampValidBits( timestampValidBits_ )
+ , minImageTransferGranularity( minImageTransferGranularity_ )
{}
QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkQueueFamilyProperties*>(this) = rhs;
+ *this = rhs;
}
QueueFamilyProperties& operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkQueueFamilyProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>(&rhs);
return *this;
}
@@ -58455,54 +55137,34 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::QueueFlags queueFlags;
- uint32_t queueCount;
- uint32_t timestampValidBits;
- vk::Extent3D minImageTransferGranularity;
+ VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
+ uint32_t queueCount = {};
+ uint32_t timestampValidBits = {};
+ VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
};
static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
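QueueFamilyProperties is a pure output struct, so it only gains the defaulted members; typical consumption is unchanged. A sketch (findGraphicsFamily is a hypothetical helper; assumes a valid vk::PhysicalDevice):

    #include <vulkan/vulkan.hpp>
    #include <cstdint>
    #include <vector>

    uint32_t findGraphicsFamily( vk::PhysicalDevice gpu )
    {
        std::vector<vk::QueueFamilyProperties> props = gpu.getQueueFamilyProperties();
        for ( uint32_t i = 0; i < props.size(); ++i )
            if ( props[i].queueFlags & vk::QueueFlagBits::eGraphics )
                return i;
        return UINT32_MAX;  // no graphics-capable family found
    }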
- namespace layout
+ struct QueueFamilyProperties2
{
- struct QueueFamilyProperties2
- {
- protected:
- QueueFamilyProperties2() VULKAN_HPP_NOEXCEPT
- {}
-
- QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkQueueFamilyProperties2*>(this) = rhs;
- }
-
- QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkQueueFamilyProperties2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eQueueFamilyProperties2;
- void* pNext = nullptr;
- vk::QueueFamilyProperties queueFamilyProperties;
- };
- static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "layout struct and wrapper have different size!" );
- }
-
- struct QueueFamilyProperties2 : public layout::QueueFamilyProperties2
- {
- QueueFamilyProperties2() VULKAN_HPP_NOEXCEPT
- : layout::QueueFamilyProperties2()
+ QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : queueFamilyProperties( queueFamilyProperties_ )
{}
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 & operator=( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) - offsetof( QueueFamilyProperties2, pNext ) );
+ return *this;
+ }
+
QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::QueueFamilyProperties2( rhs )
- {}
+ {
+ *this = rhs;
+ }
QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::QueueFamilyProperties2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>(&rhs);
return *this;
}
@@ -58528,69 +55190,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::QueueFamilyProperties2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
};
static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
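QueueFamilyProperties2 is the extensible (pNext-chained) variant, which is how QueueFamilyCheckpointPropertiesNV above gets filled in. A sketch, assuming Vulkan 1.1 and VK_NV_device_diagnostic_checkpoints are available (queryCheckpointStages is a hypothetical helper):

    #include <vulkan/vulkan.hpp>
    #include <vector>

    void queryCheckpointStages( vk::PhysicalDevice gpu )
    {
        uint32_t count = 0;
        gpu.getQueueFamilyProperties2( &count, nullptr );

        std::vector<vk::QueueFamilyProperties2>            props( count );
        std::vector<vk::QueueFamilyCheckpointPropertiesNV> checkpoints( count );
        for ( uint32_t i = 0; i < count; ++i )
            props[i].pNext = &checkpoints[i];  // chain the NV extension struct

        gpu.getQueueFamilyProperties2( &count, props.data() );
        // checkpoints[i].checkpointExecutionStageMask is now populated.
    }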
- namespace layout
- {
- struct RayTracingShaderGroupCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( vk::RayTracingShaderGroupTypeNV type_ = vk::RayTracingShaderGroupTypeNV::eGeneral,
- uint32_t generalShader_ = 0,
- uint32_t closestHitShader_ = 0,
- uint32_t anyHitShader_ = 0,
- uint32_t intersectionShader_ = 0 ) VULKAN_HPP_NOEXCEPT
- : type( type_ )
- , generalShader( generalShader_ )
- , closestHitShader( closestHitShader_ )
- , anyHitShader( anyHitShader_ )
- , intersectionShader( intersectionShader_ )
- {}
-
- RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>(this) = rhs;
- }
-
- RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
- const void* pNext = nullptr;
- vk::RayTracingShaderGroupTypeNV type;
- uint32_t generalShader;
- uint32_t closestHitShader;
- uint32_t anyHitShader;
- uint32_t intersectionShader;
- };
- static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct RayTracingShaderGroupCreateInfoNV : public layout::RayTracingShaderGroupCreateInfoNV
+ struct RayTracingShaderGroupCreateInfoNV
{
- VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( vk::RayTracingShaderGroupTypeNV type_ = vk::RayTracingShaderGroupTypeNV::eGeneral,
- uint32_t generalShader_ = 0,
- uint32_t closestHitShader_ = 0,
- uint32_t anyHitShader_ = 0,
- uint32_t intersectionShader_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::RayTracingShaderGroupCreateInfoNV( type_, generalShader_, closestHitShader_, anyHitShader_, intersectionShader_ )
+ VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral,
+ uint32_t generalShader_ = {},
+ uint32_t closestHitShader_ = {},
+ uint32_t anyHitShader_ = {},
+ uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT
+ : type( type_ )
+ , generalShader( generalShader_ )
+ , closestHitShader( closestHitShader_ )
+ , anyHitShader( anyHitShader_ )
+ , intersectionShader( intersectionShader_ )
{}
+ VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) - offsetof( RayTracingShaderGroupCreateInfoNV, pNext ) );
+ return *this;
+ }
+
RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RayTracingShaderGroupCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RayTracingShaderGroupCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>(&rhs);
return *this;
}
@@ -58600,7 +55235,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RayTracingShaderGroupCreateInfoNV & setType( vk::RayTracingShaderGroupTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ ) VULKAN_HPP_NOEXCEPT
{
type = type_;
return *this;
@@ -58656,85 +55291,54 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::RayTracingShaderGroupCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral;
+ uint32_t generalShader = {};
+ uint32_t closestHitShader = {};
+ uint32_t anyHitShader = {};
+ uint32_t intersectionShader = {};
};
static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct RayTracingPipelineCreateInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(),
- uint32_t stageCount_ = 0,
- const vk::PipelineShaderStageCreateInfo* pStages_ = nullptr,
- uint32_t groupCount_ = 0,
- const vk::RayTracingShaderGroupCreateInfoNV* pGroups_ = nullptr,
- uint32_t maxRecursionDepth_ = 0,
- vk::PipelineLayout layout_ = vk::PipelineLayout(),
- vk::Pipeline basePipelineHandle_ = vk::Pipeline(),
- int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , stageCount( stageCount_ )
- , pStages( pStages_ )
- , groupCount( groupCount_ )
- , pGroups( pGroups_ )
- , maxRecursionDepth( maxRecursionDepth_ )
- , layout( layout_ )
- , basePipelineHandle( basePipelineHandle_ )
- , basePipelineIndex( basePipelineIndex_ )
- {}
-
- RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>(this) = rhs;
- }
-
- RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
- const void* pNext = nullptr;
- vk::PipelineCreateFlags flags;
- uint32_t stageCount;
- const vk::PipelineShaderStageCreateInfo* pStages;
- uint32_t groupCount;
- const vk::RayTracingShaderGroupCreateInfoNV* pGroups;
- uint32_t maxRecursionDepth;
- vk::PipelineLayout layout;
- vk::Pipeline basePipelineHandle;
- int32_t basePipelineIndex;
- };
- static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct RayTracingPipelineCreateInfoNV : public layout::RayTracingPipelineCreateInfoNV
- {
- VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( vk::PipelineCreateFlags flags_ = vk::PipelineCreateFlags(),
- uint32_t stageCount_ = 0,
- const vk::PipelineShaderStageCreateInfo* pStages_ = nullptr,
- uint32_t groupCount_ = 0,
- const vk::RayTracingShaderGroupCreateInfoNV* pGroups_ = nullptr,
- uint32_t maxRecursionDepth_ = 0,
- vk::PipelineLayout layout_ = vk::PipelineLayout(),
- vk::Pipeline basePipelineHandle_ = vk::Pipeline(),
- int32_t basePipelineIndex_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::RayTracingPipelineCreateInfoNV( flags_, stageCount_, pStages_, groupCount_, pGroups_, maxRecursionDepth_, layout_, basePipelineHandle_, basePipelineIndex_ )
+ struct RayTracingPipelineCreateInfoNV
+ {
+ VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+ uint32_t stageCount_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {},
+ uint32_t groupCount_ = {},
+ const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {},
+ uint32_t maxRecursionDepth_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+ int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , stageCount( stageCount_ )
+ , pStages( pStages_ )
+ , groupCount( groupCount_ )
+ , pGroups( pGroups_ )
+ , maxRecursionDepth( maxRecursionDepth_ )
+ , layout( layout_ )
+ , basePipelineHandle( basePipelineHandle_ )
+ , basePipelineIndex( basePipelineIndex_ )
{}
+ VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & operator=( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) - offsetof( RayTracingPipelineCreateInfoNV, pNext ) );
+ return *this;
+ }
+
RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RayTracingPipelineCreateInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RayTracingPipelineCreateInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>(&rhs);
return *this;
}
@@ -58744,7 +55348,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RayTracingPipelineCreateInfoNV & setFlags( vk::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -58756,7 +55360,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RayTracingPipelineCreateInfoNV & setPStages( const vk::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
{
pStages = pStages_;
return *this;
@@ -58768,7 +55372,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RayTracingPipelineCreateInfoNV & setPGroups( const vk::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
{
pGroups = pGroups_;
return *this;
@@ -58780,13 +55384,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RayTracingPipelineCreateInfoNV & setLayout( vk::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
layout = layout_;
return *this;
}
- RayTracingPipelineCreateInfoNV & setBasePipelineHandle( vk::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+ RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
{
basePipelineHandle = basePipelineHandle_;
return *this;
@@ -58828,25 +55432,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::RayTracingPipelineCreateInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+ uint32_t stageCount = {};
+ const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+ uint32_t groupCount = {};
+ const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {};
+ uint32_t maxRecursionDepth = {};
+ VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+ VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+ int32_t basePipelineIndex = {};
};
static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
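The two NV ray tracing structs above are used together: groups index into the stage array, and the pipeline create info owns both arrays. A sketch for a single raygen-only group (makeRayPipelineNV is a hypothetical helper; assumes VK_NV_ray_tracing is enabled and stage 0 is the raygen shader):

    #include <vulkan/vulkan.hpp>

    vk::Pipeline makeRayPipelineNV( vk::Device device, vk::PipelineLayout layout,
                                    const vk::PipelineShaderStageCreateInfo* stages,
                                    uint32_t stageCount )
    {
        // A "general" group pointing at stage 0; unused slots stay VK_SHADER_UNUSED_NV.
        vk::RayTracingShaderGroupCreateInfoNV group(
            vk::RayTracingShaderGroupTypeNV::eGeneral,
            0, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV );

        vk::RayTracingPipelineCreateInfoNV info = vk::RayTracingPipelineCreateInfoNV()
            .setStageCount( stageCount )
            .setPStages( stages )
            .setGroupCount( 1 )
            .setPGroups( &group )
            .setMaxRecursionDepth( 1 )
            .setLayout( layout );
        return device.createRayTracingPipelineNV( nullptr, info );
    }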
struct RefreshCycleDurationGOOGLE
{
- RefreshCycleDurationGOOGLE() VULKAN_HPP_NOEXCEPT
+ RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT
+ : refreshDuration( refreshDuration_ )
{}
RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>(this) = rhs;
+ *this = rhs;
}
RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>(&rhs);
return *this;
}
@@ -58871,88 +55486,65 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint64_t refreshDuration;
+ uint64_t refreshDuration = {};
};
static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct RenderPassAttachmentBeginInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfoKHR( uint32_t attachmentCount_ = 0,
- const vk::ImageView* pAttachments_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- {}
-
- RenderPassAttachmentBeginInfoKHR( VkRenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR*>(this) = rhs;
- }
-
- RenderPassAttachmentBeginInfoKHR& operator=( VkRenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRenderPassAttachmentBeginInfoKHR;
- const void* pNext = nullptr;
- uint32_t attachmentCount;
- const vk::ImageView* pAttachments;
- };
- static_assert( sizeof( RenderPassAttachmentBeginInfoKHR ) == sizeof( VkRenderPassAttachmentBeginInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct RenderPassAttachmentBeginInfoKHR : public layout::RenderPassAttachmentBeginInfoKHR
+ struct RenderPassAttachmentBeginInfo
{
- VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfoKHR( uint32_t attachmentCount_ = 0,
- const vk::ImageView* pAttachments_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassAttachmentBeginInfoKHR( attachmentCount_, pAttachments_ )
+ VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+ : attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
{}
- RenderPassAttachmentBeginInfoKHR( VkRenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassAttachmentBeginInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) - offsetof( RenderPassAttachmentBeginInfo, pNext ) );
+ return *this;
+ }
- RenderPassAttachmentBeginInfoKHR& operator=( VkRenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RenderPassAttachmentBeginInfoKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ RenderPassAttachmentBeginInfo& operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>(&rhs);
return *this;
}
- RenderPassAttachmentBeginInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassAttachmentBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- RenderPassAttachmentBeginInfoKHR & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
- RenderPassAttachmentBeginInfoKHR & setPAttachments( const vk::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
- operator VkRenderPassAttachmentBeginInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassAttachmentBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkRenderPassAttachmentBeginInfoKHR*>( this );
+ return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo*>( this );
}
- operator VkRenderPassAttachmentBeginInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR*>( this );
+ return *reinterpret_cast<VkRenderPassAttachmentBeginInfo*>( this );
}
- bool operator==( RenderPassAttachmentBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -58960,74 +55552,48 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pAttachments == rhs.pAttachments );
}
- bool operator!=( RenderPassAttachmentBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::RenderPassAttachmentBeginInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo;
+ const void* pNext = {};
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
};
- static_assert( sizeof( RenderPassAttachmentBeginInfoKHR ) == sizeof( VkRenderPassAttachmentBeginInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RenderPassAttachmentBeginInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct RenderPassBeginInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR RenderPassBeginInfo( vk::RenderPass renderPass_ = vk::RenderPass(),
- vk::Framebuffer framebuffer_ = vk::Framebuffer(),
- vk::Rect2D renderArea_ = vk::Rect2D(),
- uint32_t clearValueCount_ = 0,
- const vk::ClearValue* pClearValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : renderPass( renderPass_ )
- , framebuffer( framebuffer_ )
- , renderArea( renderArea_ )
- , clearValueCount( clearValueCount_ )
- , pClearValues( pClearValues_ )
- {}
-
- RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassBeginInfo*>(this) = rhs;
- }
-
- RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassBeginInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRenderPassBeginInfo;
- const void* pNext = nullptr;
- vk::RenderPass renderPass;
- vk::Framebuffer framebuffer;
- vk::Rect2D renderArea;
- uint32_t clearValueCount;
- const vk::ClearValue* pClearValues;
- };
- static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
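The KHR suffix drops because VK_KHR_imageless_framebuffer was promoted to core in Vulkan 1.2; the struct supplies the actual image views at begin time through RenderPassBeginInfo::pNext. A sketch (beginImageless is a hypothetical helper; assumes the framebuffer was created with eImageless and a matching attachment description):

    #include <vulkan/vulkan.hpp>

    void beginImageless( vk::CommandBuffer cmd, vk::RenderPassBeginInfo beginInfo,
                         vk::ImageView view )
    {
        vk::RenderPassAttachmentBeginInfo attachments = vk::RenderPassAttachmentBeginInfo()
            .setAttachmentCount( 1 )
            .setPAttachments( &view );
        beginInfo.setPNext( &attachments );  // valid for the duration of the call
        cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
    }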
- struct RenderPassBeginInfo : public layout::RenderPassBeginInfo
+ struct RenderPassBeginInfo
{
- VULKAN_HPP_CONSTEXPR RenderPassBeginInfo( vk::RenderPass renderPass_ = vk::RenderPass(),
- vk::Framebuffer framebuffer_ = vk::Framebuffer(),
- vk::Rect2D renderArea_ = vk::Rect2D(),
- uint32_t clearValueCount_ = 0,
- const vk::ClearValue* pClearValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassBeginInfo( renderPass_, framebuffer_, renderArea_, clearValueCount_, pClearValues_ )
+ VULKAN_HPP_CONSTEXPR RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
+ uint32_t clearValueCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT
+ : renderPass( renderPass_ )
+ , framebuffer( framebuffer_ )
+ , renderArea( renderArea_ )
+ , clearValueCount( clearValueCount_ )
+ , pClearValues( pClearValues_ )
{}
+ VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) - offsetof( RenderPassBeginInfo, pNext ) );
+ return *this;
+ }
+
RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassBeginInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RenderPassBeginInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>(&rhs);
return *this;
}
@@ -59037,19 +55603,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassBeginInfo & setRenderPass( vk::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
{
renderPass = renderPass_;
return *this;
}
- RenderPassBeginInfo & setFramebuffer( vk::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
{
framebuffer = framebuffer_;
return *this;
}
- RenderPassBeginInfo & setRenderArea( vk::Rect2D renderArea_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D renderArea_ ) VULKAN_HPP_NOEXCEPT
{
renderArea = renderArea_;
return *this;
@@ -59061,7 +55627,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassBeginInfo & setPClearValues( const vk::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
{
pClearValues = pClearValues_;
return *this;
@@ -59093,24 +55659,30 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::RenderPassBeginInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+ VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+ uint32_t clearValueCount = {};
+ const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
};
static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
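RenderPassBeginInfo keeps its shape apart from the defaulted members; a usage sketch (beginPass is a hypothetical helper; assumes the handles are valid and the framebuffer matches the render pass):

    #include <vulkan/vulkan.hpp>
    #include <array>

    void beginPass( vk::CommandBuffer cmd, vk::RenderPass renderPass,
                    vk::Framebuffer framebuffer, vk::Extent2D extent )
    {
        vk::ClearValue clear( vk::ClearColorValue( std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } ) );

        vk::RenderPassBeginInfo info = vk::RenderPassBeginInfo()
            .setRenderPass( renderPass )
            .setFramebuffer( framebuffer )
            .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
            .setClearValueCount( 1 )
            .setPClearValues( &clear );
        cmd.beginRenderPass( info, vk::SubpassContents::eInline );
    }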
struct SubpassDescription
{
- VULKAN_HPP_CONSTEXPR SubpassDescription( vk::SubpassDescriptionFlags flags_ = vk::SubpassDescriptionFlags(),
- vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics,
- uint32_t inputAttachmentCount_ = 0,
- const vk::AttachmentReference* pInputAttachments_ = nullptr,
- uint32_t colorAttachmentCount_ = 0,
- const vk::AttachmentReference* pColorAttachments_ = nullptr,
- const vk::AttachmentReference* pResolveAttachments_ = nullptr,
- const vk::AttachmentReference* pDepthStencilAttachment_ = nullptr,
- uint32_t preserveAttachmentCount_ = 0,
- const uint32_t* pPreserveAttachments_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ uint32_t inputAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {},
+ uint32_t colorAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {},
+ uint32_t preserveAttachmentCount_ = {},
+ const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
: flags( flags_ )
, pipelineBindPoint( pipelineBindPoint_ )
, inputAttachmentCount( inputAttachmentCount_ )
@@ -59125,22 +55697,22 @@ namespace VULKAN_HPP_NAMESPACE
SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSubpassDescription*>(this) = rhs;
+ *this = rhs;
}
SubpassDescription& operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSubpassDescription*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>(&rhs);
return *this;
}
- SubpassDescription & setFlags( vk::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- SubpassDescription & setPipelineBindPoint( vk::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
@@ -59152,7 +55724,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SubpassDescription & setPInputAttachments( const vk::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pInputAttachments = pInputAttachments_;
return *this;
@@ -59164,19 +55736,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SubpassDescription & setPColorAttachments( const vk::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachments = pColorAttachments_;
return *this;
}
- SubpassDescription & setPResolveAttachments( const vk::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pResolveAttachments = pResolveAttachments_;
return *this;
}
- SubpassDescription & setPDepthStencilAttachment( const vk::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilAttachment = pDepthStencilAttachment_;
return *this;
@@ -59224,29 +55796,29 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::SubpassDescriptionFlags flags;
- vk::PipelineBindPoint pipelineBindPoint;
- uint32_t inputAttachmentCount;
- const vk::AttachmentReference* pInputAttachments;
- uint32_t colorAttachmentCount;
- const vk::AttachmentReference* pColorAttachments;
- const vk::AttachmentReference* pResolveAttachments;
- const vk::AttachmentReference* pDepthStencilAttachment;
- uint32_t preserveAttachmentCount;
- const uint32_t* pPreserveAttachments;
+ VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ uint32_t inputAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {};
+ uint32_t colorAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {};
+ uint32_t preserveAttachmentCount = {};
+ const uint32_t* pPreserveAttachments = {};
};
static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" );
struct SubpassDependency
{
- VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = 0,
- uint32_t dstSubpass_ = 0,
- vk::PipelineStageFlags srcStageMask_ = vk::PipelineStageFlags(),
- vk::PipelineStageFlags dstStageMask_ = vk::PipelineStageFlags(),
- vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags(),
- vk::DependencyFlags dependencyFlags_ = vk::DependencyFlags() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {},
+ uint32_t dstSubpass_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
: srcSubpass( srcSubpass_ )
, dstSubpass( dstSubpass_ )
, srcStageMask( srcStageMask_ )
@@ -59258,12 +55830,12 @@ namespace VULKAN_HPP_NAMESPACE
SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSubpassDependency*>(this) = rhs;
+ *this = rhs;
}
SubpassDependency& operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSubpassDependency*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>(&rhs);
return *this;
}
@@ -59279,31 +55851,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SubpassDependency & setSrcStageMask( vk::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
- SubpassDependency & setDstStageMask( vk::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
- SubpassDependency & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- SubpassDependency & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- SubpassDependency & setDependencyFlags( vk::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
{
dependencyFlags = dependencyFlags_;
return *this;
@@ -59336,82 +55908,49 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t srcSubpass;
- uint32_t dstSubpass;
- vk::PipelineStageFlags srcStageMask;
- vk::PipelineStageFlags dstStageMask;
- vk::AccessFlags srcAccessMask;
- vk::AccessFlags dstAccessMask;
- vk::DependencyFlags dependencyFlags;
+ uint32_t srcSubpass = {};
+ uint32_t dstSubpass = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
};
static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!" );
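SubpassDependency has no sType/pNext, so it only gains the "= {}" defaults. For illustration, the canonical external dependency for a single color pass (makeColorDependency is a hypothetical helper):

    #include <vulkan/vulkan.hpp>

    vk::SubpassDependency makeColorDependency()
    {
        return vk::SubpassDependency()
            .setSrcSubpass( VK_SUBPASS_EXTERNAL )
            .setDstSubpass( 0 )
            .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
            .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
            .setSrcAccessMask( {} )  // nothing to wait on from before the pass
            .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );
    }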
- namespace layout
- {
- struct RenderPassCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( vk::RenderPassCreateFlags flags_ = vk::RenderPassCreateFlags(),
- uint32_t attachmentCount_ = 0,
- const vk::AttachmentDescription* pAttachments_ = nullptr,
- uint32_t subpassCount_ = 0,
- const vk::SubpassDescription* pSubpasses_ = nullptr,
- uint32_t dependencyCount_ = 0,
- const vk::SubpassDependency* pDependencies_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , subpassCount( subpassCount_ )
- , pSubpasses( pSubpasses_ )
- , dependencyCount( dependencyCount_ )
- , pDependencies( pDependencies_ )
- {}
-
- RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassCreateInfo*>(this) = rhs;
- }
-
- RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRenderPassCreateInfo;
- const void* pNext = nullptr;
- vk::RenderPassCreateFlags flags;
- uint32_t attachmentCount;
- const vk::AttachmentDescription* pAttachments;
- uint32_t subpassCount;
- const vk::SubpassDescription* pSubpasses;
- uint32_t dependencyCount;
- const vk::SubpassDependency* pDependencies;
- };
- static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct RenderPassCreateInfo : public layout::RenderPassCreateInfo
+ struct RenderPassCreateInfo
{
- VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( vk::RenderPassCreateFlags flags_ = vk::RenderPassCreateFlags(),
- uint32_t attachmentCount_ = 0,
- const vk::AttachmentDescription* pAttachments_ = nullptr,
- uint32_t subpassCount_ = 0,
- const vk::SubpassDescription* pSubpasses_ = nullptr,
- uint32_t dependencyCount_ = 0,
- const vk::SubpassDependency* pDependencies_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassCreateInfo( flags_, attachmentCount_, pAttachments_, subpassCount_, pSubpasses_, dependencyCount_, pDependencies_ )
+ VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
+ uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {},
+ uint32_t subpassCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {},
+ uint32_t dependencyCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
+ , subpassCount( subpassCount_ )
+ , pSubpasses( pSubpasses_ )
+ , dependencyCount( dependencyCount_ )
+ , pDependencies( pDependencies_ )
{}
+ VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) - offsetof( RenderPassCreateInfo, pNext ) );
+ return *this;
+ }
+
RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RenderPassCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>(&rhs);
return *this;
}
@@ -59421,7 +55960,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassCreateInfo & setFlags( vk::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -59433,7 +55972,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassCreateInfo & setPAttachments( const vk::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
@@ -59445,7 +55984,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassCreateInfo & setPSubpasses( const vk::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
{
pSubpasses = pSubpasses_;
return *this;
@@ -59457,7 +55996,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassCreateInfo & setPDependencies( const vk::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
{
pDependencies = pDependencies_;
return *this;
@@ -59491,179 +56030,146 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::RenderPassCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {};
+ uint32_t subpassCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {};
+ uint32_t dependencyCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {};
};
static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
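RenderPassCreateInfo ties the pieces above together. An end-to-end sketch for a one-subpass pass (makeSimplePass is a hypothetical helper; assumes a valid vk::Device and that colorFormat matches the target image):

    #include <vulkan/vulkan.hpp>

    vk::RenderPass makeSimplePass( vk::Device device, vk::Format colorFormat )
    {
        vk::AttachmentDescription color( {}, colorFormat, vk::SampleCountFlagBits::e1,
                                         vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore,
                                         vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
                                         vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR );

        vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );

        vk::SubpassDescription subpass = vk::SubpassDescription()
            .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
            .setColorAttachmentCount( 1 )
            .setPColorAttachments( &colorRef );

        vk::RenderPassCreateInfo info = vk::RenderPassCreateInfo()
            .setAttachmentCount( 1 )
            .setPAttachments( &color )
            .setSubpassCount( 1 )
            .setPSubpasses( &subpass );
        return device.createRenderPass( info );
    }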
- namespace layout
- {
- struct SubpassDescription2KHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SubpassDescription2KHR( vk::SubpassDescriptionFlags flags_ = vk::SubpassDescriptionFlags(),
- vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics,
- uint32_t viewMask_ = 0,
- uint32_t inputAttachmentCount_ = 0,
- const vk::AttachmentReference2KHR* pInputAttachments_ = nullptr,
- uint32_t colorAttachmentCount_ = 0,
- const vk::AttachmentReference2KHR* pColorAttachments_ = nullptr,
- const vk::AttachmentReference2KHR* pResolveAttachments_ = nullptr,
- const vk::AttachmentReference2KHR* pDepthStencilAttachment_ = nullptr,
- uint32_t preserveAttachmentCount_ = 0,
- const uint32_t* pPreserveAttachments_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , pipelineBindPoint( pipelineBindPoint_ )
- , viewMask( viewMask_ )
- , inputAttachmentCount( inputAttachmentCount_ )
- , pInputAttachments( pInputAttachments_ )
- , colorAttachmentCount( colorAttachmentCount_ )
- , pColorAttachments( pColorAttachments_ )
- , pResolveAttachments( pResolveAttachments_ )
- , pDepthStencilAttachment( pDepthStencilAttachment_ )
- , preserveAttachmentCount( preserveAttachmentCount_ )
- , pPreserveAttachments( pPreserveAttachments_ )
- {}
-
- SubpassDescription2KHR( VkSubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassDescription2KHR*>(this) = rhs;
- }
-
- SubpassDescription2KHR& operator=( VkSubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassDescription2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSubpassDescription2KHR;
- const void* pNext = nullptr;
- vk::SubpassDescriptionFlags flags;
- vk::PipelineBindPoint pipelineBindPoint;
- uint32_t viewMask;
- uint32_t inputAttachmentCount;
- const vk::AttachmentReference2KHR* pInputAttachments;
- uint32_t colorAttachmentCount;
- const vk::AttachmentReference2KHR* pColorAttachments;
- const vk::AttachmentReference2KHR* pResolveAttachments;
- const vk::AttachmentReference2KHR* pDepthStencilAttachment;
- uint32_t preserveAttachmentCount;
- const uint32_t* pPreserveAttachments;
- };
- static_assert( sizeof( SubpassDescription2KHR ) == sizeof( VkSubpassDescription2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SubpassDescription2KHR : public layout::SubpassDescription2KHR
- {
- VULKAN_HPP_CONSTEXPR SubpassDescription2KHR( vk::SubpassDescriptionFlags flags_ = vk::SubpassDescriptionFlags(),
- vk::PipelineBindPoint pipelineBindPoint_ = vk::PipelineBindPoint::eGraphics,
- uint32_t viewMask_ = 0,
- uint32_t inputAttachmentCount_ = 0,
- const vk::AttachmentReference2KHR* pInputAttachments_ = nullptr,
- uint32_t colorAttachmentCount_ = 0,
- const vk::AttachmentReference2KHR* pColorAttachments_ = nullptr,
- const vk::AttachmentReference2KHR* pResolveAttachments_ = nullptr,
- const vk::AttachmentReference2KHR* pDepthStencilAttachment_ = nullptr,
- uint32_t preserveAttachmentCount_ = 0,
- const uint32_t* pPreserveAttachments_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassDescription2KHR( flags_, pipelineBindPoint_, viewMask_, inputAttachmentCount_, pInputAttachments_, colorAttachmentCount_, pColorAttachments_, pResolveAttachments_, pDepthStencilAttachment_, preserveAttachmentCount_, pPreserveAttachments_ )
+ struct SubpassDescription2
+ {
+ VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+ uint32_t viewMask_ = {},
+ uint32_t inputAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {},
+ uint32_t colorAttachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {},
+ uint32_t preserveAttachmentCount_ = {},
+ const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , pipelineBindPoint( pipelineBindPoint_ )
+ , viewMask( viewMask_ )
+ , inputAttachmentCount( inputAttachmentCount_ )
+ , pInputAttachments( pInputAttachments_ )
+ , colorAttachmentCount( colorAttachmentCount_ )
+ , pColorAttachments( pColorAttachments_ )
+ , pResolveAttachments( pResolveAttachments_ )
+ , pDepthStencilAttachment( pDepthStencilAttachment_ )
+ , preserveAttachmentCount( preserveAttachmentCount_ )
+ , pPreserveAttachments( pPreserveAttachments_ )
{}
- SubpassDescription2KHR( VkSubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassDescription2KHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::SubpassDescription2 & operator=( VULKAN_HPP_NAMESPACE::SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) - offsetof( SubpassDescription2, pNext ) );
+ return *this;
+ }
- SubpassDescription2KHR& operator=( VkSubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SubpassDescription2KHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ SubpassDescription2& operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>(&rhs);
return *this;
}
- SubpassDescription2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- SubpassDescription2KHR & setFlags( vk::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- SubpassDescription2KHR & setPipelineBindPoint( vk::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
{
pipelineBindPoint = pipelineBindPoint_;
return *this;
}
- SubpassDescription2KHR & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
{
viewMask = viewMask_;
return *this;
}
- SubpassDescription2KHR & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentCount = inputAttachmentCount_;
return *this;
}
- SubpassDescription2KHR & setPInputAttachments( const vk::AttachmentReference2KHR* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pInputAttachments = pInputAttachments_;
return *this;
}
- SubpassDescription2KHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
colorAttachmentCount = colorAttachmentCount_;
return *this;
}
- SubpassDescription2KHR & setPColorAttachments( const vk::AttachmentReference2KHR* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pColorAttachments = pColorAttachments_;
return *this;
}
- SubpassDescription2KHR & setPResolveAttachments( const vk::AttachmentReference2KHR* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pResolveAttachments = pResolveAttachments_;
return *this;
}
- SubpassDescription2KHR & setPDepthStencilAttachment( const vk::AttachmentReference2KHR* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilAttachment = pDepthStencilAttachment_;
return *this;
}
- SubpassDescription2KHR & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
preserveAttachmentCount = preserveAttachmentCount_;
return *this;
}
- SubpassDescription2KHR & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pPreserveAttachments = pPreserveAttachments_;
return *this;
}
- operator VkSubpassDescription2KHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSubpassDescription2KHR*>( this );
+ return *reinterpret_cast<const VkSubpassDescription2*>( this );
}
- operator VkSubpassDescription2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSubpassDescription2KHR*>( this );
+ return *reinterpret_cast<VkSubpassDescription2*>( this );
}
- bool operator==( SubpassDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -59680,154 +56186,131 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pPreserveAttachments == rhs.pPreserveAttachments );
}
- bool operator!=( SubpassDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SubpassDescription2KHR::sType;
- };
- static_assert( sizeof( SubpassDescription2KHR ) == sizeof( VkSubpassDescription2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassDescription2KHR>::value, "struct wrapper is not a standard layout!" );
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+ VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+ uint32_t viewMask = {};
+ uint32_t inputAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {};
+ uint32_t colorAttachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {};
+ uint32_t preserveAttachmentCount = {};
+ const uint32_t* pPreserveAttachments = {};
+ };
+ static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
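
Usage note: with the layout:: base class gone, the promoted struct is still built through the fluent setters kept by the flattened definition. A minimal sketch, assuming the default vk alias for VULKAN_HPP_NAMESPACE and nothing beyond the names generated above:

    vk::AttachmentReference2 colorRef;
    colorRef.setAttachment( 0 )
            .setLayout( vk::ImageLayout::eColorAttachmentOptimal )
            .setAspectMask( vk::ImageAspectFlagBits::eColor );

    vk::SubpassDescription2 subpass;
    subpass.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
           .setColorAttachmentCount( 1 )
           .setPColorAttachments( &colorRef );

Because sType is now a const member, assignment runs the memcpy starting at pNext and can never clobber the structure type.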
+
+ struct SubpassDependency2
+ {
+ VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {},
+ uint32_t dstSubpass_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {},
+ int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+ : srcSubpass( srcSubpass_ )
+ , dstSubpass( dstSubpass_ )
+ , srcStageMask( srcStageMask_ )
+ , dstStageMask( dstStageMask_ )
+ , srcAccessMask( srcAccessMask_ )
+ , dstAccessMask( dstAccessMask_ )
+ , dependencyFlags( dependencyFlags_ )
+ , viewOffset( viewOffset_ )
+ {}
- namespace layout
- {
- struct SubpassDependency2KHR
+ VULKAN_HPP_NAMESPACE::SubpassDependency2 & operator=( VULKAN_HPP_NAMESPACE::SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR SubpassDependency2KHR( uint32_t srcSubpass_ = 0,
- uint32_t dstSubpass_ = 0,
- vk::PipelineStageFlags srcStageMask_ = vk::PipelineStageFlags(),
- vk::PipelineStageFlags dstStageMask_ = vk::PipelineStageFlags(),
- vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags(),
- vk::DependencyFlags dependencyFlags_ = vk::DependencyFlags(),
- int32_t viewOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : srcSubpass( srcSubpass_ )
- , dstSubpass( dstSubpass_ )
- , srcStageMask( srcStageMask_ )
- , dstStageMask( dstStageMask_ )
- , srcAccessMask( srcAccessMask_ )
- , dstAccessMask( dstAccessMask_ )
- , dependencyFlags( dependencyFlags_ )
- , viewOffset( viewOffset_ )
- {}
-
- SubpassDependency2KHR( VkSubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassDependency2KHR*>(this) = rhs;
- }
-
- SubpassDependency2KHR& operator=( VkSubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassDependency2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSubpassDependency2KHR;
- const void* pNext = nullptr;
- uint32_t srcSubpass;
- uint32_t dstSubpass;
- vk::PipelineStageFlags srcStageMask;
- vk::PipelineStageFlags dstStageMask;
- vk::AccessFlags srcAccessMask;
- vk::AccessFlags dstAccessMask;
- vk::DependencyFlags dependencyFlags;
- int32_t viewOffset;
- };
- static_assert( sizeof( SubpassDependency2KHR ) == sizeof( VkSubpassDependency2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SubpassDependency2KHR : public layout::SubpassDependency2KHR
- {
- VULKAN_HPP_CONSTEXPR SubpassDependency2KHR( uint32_t srcSubpass_ = 0,
- uint32_t dstSubpass_ = 0,
- vk::PipelineStageFlags srcStageMask_ = vk::PipelineStageFlags(),
- vk::PipelineStageFlags dstStageMask_ = vk::PipelineStageFlags(),
- vk::AccessFlags srcAccessMask_ = vk::AccessFlags(),
- vk::AccessFlags dstAccessMask_ = vk::AccessFlags(),
- vk::DependencyFlags dependencyFlags_ = vk::DependencyFlags(),
- int32_t viewOffset_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassDependency2KHR( srcSubpass_, dstSubpass_, srcStageMask_, dstStageMask_, srcAccessMask_, dstAccessMask_, dependencyFlags_, viewOffset_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) - offsetof( SubpassDependency2, pNext ) );
+ return *this;
+ }
- SubpassDependency2KHR( VkSubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassDependency2KHR( rhs )
- {}
+ SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- SubpassDependency2KHR& operator=( VkSubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2& operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SubpassDependency2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>(&rhs);
return *this;
}
- SubpassDependency2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- SubpassDependency2KHR & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
{
srcSubpass = srcSubpass_;
return *this;
}
- SubpassDependency2KHR & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
{
dstSubpass = dstSubpass_;
return *this;
}
- SubpassDependency2KHR & setSrcStageMask( vk::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
{
srcStageMask = srcStageMask_;
return *this;
}
- SubpassDependency2KHR & setDstStageMask( vk::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
dstStageMask = dstStageMask_;
return *this;
}
- SubpassDependency2KHR & setSrcAccessMask( vk::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
srcAccessMask = srcAccessMask_;
return *this;
}
- SubpassDependency2KHR & setDstAccessMask( vk::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
{
dstAccessMask = dstAccessMask_;
return *this;
}
- SubpassDependency2KHR & setDependencyFlags( vk::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
{
dependencyFlags = dependencyFlags_;
return *this;
}
- SubpassDependency2KHR & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
{
viewOffset = viewOffset_;
return *this;
}
- operator VkSubpassDependency2KHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSubpassDependency2KHR*>( this );
+ return *reinterpret_cast<const VkSubpassDependency2*>( this );
}
- operator VkSubpassDependency2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSubpassDependency2KHR*>( this );
+ return *reinterpret_cast<VkSubpassDependency2*>( this );
}
- bool operator==( SubpassDependency2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -59841,164 +56324,136 @@ namespace VULKAN_HPP_NAMESPACE
&& ( viewOffset == rhs.viewOffset );
}
- bool operator!=( SubpassDependency2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SubpassDependency2KHR::sType;
- };
- static_assert( sizeof( SubpassDependency2KHR ) == sizeof( VkSubpassDependency2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassDependency2KHR>::value, "struct wrapper is not a standard layout!" );
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
+ const void* pNext = {};
+ uint32_t srcSubpass = {};
+ uint32_t dstSubpass = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+ VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+ VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+ VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+ int32_t viewOffset = {};
+ };
+ static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
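
The dependency struct follows the same pattern; a sketch of the common external-to-first-subpass dependency (vk alias assumed as above):

    vk::SubpassDependency2 dependency;
    dependency.setSrcSubpass( VK_SUBPASS_EXTERNAL )
              .setDstSubpass( 0 )
              .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
              .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
              .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );

viewOffset only matters for multiview render passes and stays value-initialized to 0 here.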
+
+ struct RenderPassCreateInfo2
+ {
+ VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
+ uint32_t attachmentCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {},
+ uint32_t subpassCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {},
+ uint32_t dependencyCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {},
+ uint32_t correlatedViewMaskCount_ = {},
+ const uint32_t* pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , attachmentCount( attachmentCount_ )
+ , pAttachments( pAttachments_ )
+ , subpassCount( subpassCount_ )
+ , pSubpasses( pSubpasses_ )
+ , dependencyCount( dependencyCount_ )
+ , pDependencies( pDependencies_ )
+ , correlatedViewMaskCount( correlatedViewMaskCount_ )
+ , pCorrelatedViewMasks( pCorrelatedViewMasks_ )
+ {}
- namespace layout
- {
- struct RenderPassCreateInfo2KHR
+ VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & operator=( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- protected:
- VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2KHR( vk::RenderPassCreateFlags flags_ = vk::RenderPassCreateFlags(),
- uint32_t attachmentCount_ = 0,
- const vk::AttachmentDescription2KHR* pAttachments_ = nullptr,
- uint32_t subpassCount_ = 0,
- const vk::SubpassDescription2KHR* pSubpasses_ = nullptr,
- uint32_t dependencyCount_ = 0,
- const vk::SubpassDependency2KHR* pDependencies_ = nullptr,
- uint32_t correlatedViewMaskCount_ = 0,
- const uint32_t* pCorrelatedViewMasks_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , attachmentCount( attachmentCount_ )
- , pAttachments( pAttachments_ )
- , subpassCount( subpassCount_ )
- , pSubpasses( pSubpasses_ )
- , dependencyCount( dependencyCount_ )
- , pDependencies( pDependencies_ )
- , correlatedViewMaskCount( correlatedViewMaskCount_ )
- , pCorrelatedViewMasks( pCorrelatedViewMasks_ )
- {}
-
- RenderPassCreateInfo2KHR( VkRenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassCreateInfo2KHR*>(this) = rhs;
- }
-
- RenderPassCreateInfo2KHR& operator=( VkRenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassCreateInfo2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRenderPassCreateInfo2KHR;
- const void* pNext = nullptr;
- vk::RenderPassCreateFlags flags;
- uint32_t attachmentCount;
- const vk::AttachmentDescription2KHR* pAttachments;
- uint32_t subpassCount;
- const vk::SubpassDescription2KHR* pSubpasses;
- uint32_t dependencyCount;
- const vk::SubpassDependency2KHR* pDependencies;
- uint32_t correlatedViewMaskCount;
- const uint32_t* pCorrelatedViewMasks;
- };
- static_assert( sizeof( RenderPassCreateInfo2KHR ) == sizeof( VkRenderPassCreateInfo2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct RenderPassCreateInfo2KHR : public layout::RenderPassCreateInfo2KHR
- {
- VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2KHR( vk::RenderPassCreateFlags flags_ = vk::RenderPassCreateFlags(),
- uint32_t attachmentCount_ = 0,
- const vk::AttachmentDescription2KHR* pAttachments_ = nullptr,
- uint32_t subpassCount_ = 0,
- const vk::SubpassDescription2KHR* pSubpasses_ = nullptr,
- uint32_t dependencyCount_ = 0,
- const vk::SubpassDependency2KHR* pDependencies_ = nullptr,
- uint32_t correlatedViewMaskCount_ = 0,
- const uint32_t* pCorrelatedViewMasks_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassCreateInfo2KHR( flags_, attachmentCount_, pAttachments_, subpassCount_, pSubpasses_, dependencyCount_, pDependencies_, correlatedViewMaskCount_, pCorrelatedViewMasks_ )
- {}
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) - offsetof( RenderPassCreateInfo2, pNext ) );
+ return *this;
+ }
- RenderPassCreateInfo2KHR( VkRenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassCreateInfo2KHR( rhs )
- {}
+ RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- RenderPassCreateInfo2KHR& operator=( VkRenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2& operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RenderPassCreateInfo2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>(&rhs);
return *this;
}
- RenderPassCreateInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- RenderPassCreateInfo2KHR & setFlags( vk::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- RenderPassCreateInfo2KHR & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
{
attachmentCount = attachmentCount_;
return *this;
}
- RenderPassCreateInfo2KHR & setPAttachments( const vk::AttachmentDescription2KHR* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT
{
pAttachments = pAttachments_;
return *this;
}
- RenderPassCreateInfo2KHR & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
{
subpassCount = subpassCount_;
return *this;
}
- RenderPassCreateInfo2KHR & setPSubpasses( const vk::SubpassDescription2KHR* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
{
pSubpasses = pSubpasses_;
return *this;
}
- RenderPassCreateInfo2KHR & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
{
dependencyCount = dependencyCount_;
return *this;
}
- RenderPassCreateInfo2KHR & setPDependencies( const vk::SubpassDependency2KHR* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT
{
pDependencies = pDependencies_;
return *this;
}
- RenderPassCreateInfo2KHR & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
{
correlatedViewMaskCount = correlatedViewMaskCount_;
return *this;
}
- RenderPassCreateInfo2KHR & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
{
pCorrelatedViewMasks = pCorrelatedViewMasks_;
return *this;
}
- operator VkRenderPassCreateInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassCreateInfo2 const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( this );
+ return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
}
- operator VkRenderPassCreateInfo2KHR &() VULKAN_HPP_NOEXCEPT
+ operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkRenderPassCreateInfo2KHR*>( this );
+ return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
}
- bool operator==( RenderPassCreateInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -60013,58 +56468,47 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
}
- bool operator!=( RenderPassCreateInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::RenderPassCreateInfo2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+ uint32_t attachmentCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {};
+ uint32_t subpassCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {};
+ uint32_t dependencyCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {};
+ uint32_t correlatedViewMaskCount = {};
+ const uint32_t* pCorrelatedViewMasks = {};
};
- static_assert( sizeof( RenderPassCreateInfo2KHR ) == sizeof( VkRenderPassCreateInfo2KHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<RenderPassCreateInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
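
A sketch tying the three promoted structs together, assuming a vk::Device named device whose implementation offers Vulkan 1.2 (on older implementations the same entry point is reachable through the KHR aliases and VK_KHR_create_renderpass2):

    vk::AttachmentDescription2 colorAttachment;
    colorAttachment.setFormat( vk::Format::eB8G8R8A8Unorm )
                   .setSamples( vk::SampleCountFlagBits::e1 )
                   .setLoadOp( vk::AttachmentLoadOp::eClear )
                   .setStoreOp( vk::AttachmentStoreOp::eStore )
                   .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );

    vk::RenderPassCreateInfo2 renderPassInfo;
    renderPassInfo.setAttachmentCount( 1 )
                  .setPAttachments( &colorAttachment )
                  .setSubpassCount( 1 )
                  .setPSubpasses( &subpass )        // from the SubpassDescription2 sketch above
                  .setDependencyCount( 1 )
                  .setPDependencies( &dependency ); // from the SubpassDependency2 sketch above

    vk::RenderPass renderPass = device.createRenderPass2( renderPassInfo );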
- namespace layout
+ struct RenderPassFragmentDensityMapCreateInfoEXT
{
- struct RenderPassFragmentDensityMapCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( vk::AttachmentReference fragmentDensityMapAttachment_ = vk::AttachmentReference() ) VULKAN_HPP_NOEXCEPT
- : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
- {}
-
- RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>(this) = rhs;
- }
-
- RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
- const void* pNext = nullptr;
- vk::AttachmentReference fragmentDensityMapAttachment;
- };
- static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct RenderPassFragmentDensityMapCreateInfoEXT : public layout::RenderPassFragmentDensityMapCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( vk::AttachmentReference fragmentDensityMapAttachment_ = vk::AttachmentReference() ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassFragmentDensityMapCreateInfoEXT( fragmentDensityMapAttachment_ )
+ VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
+ : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
{}
+ VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) - offsetof( RenderPassFragmentDensityMapCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassFragmentDensityMapCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RenderPassFragmentDensityMapCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -60074,7 +56518,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( vk::AttachmentReference fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
return *this;
@@ -60102,57 +56546,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::RenderPassFragmentDensityMapCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
};
static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct RenderPassInputAttachmentAspectCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0,
- const vk::InputAttachmentAspectReference* pAspectReferences_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : aspectReferenceCount( aspectReferenceCount_ )
- , pAspectReferences( pAspectReferences_ )
- {}
-
- RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>(this) = rhs;
- }
-
- RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
- const void* pNext = nullptr;
- uint32_t aspectReferenceCount;
- const vk::InputAttachmentAspectReference* pAspectReferences;
- };
- static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct RenderPassInputAttachmentAspectCreateInfo : public layout::RenderPassInputAttachmentAspectCreateInfo
+ struct RenderPassInputAttachmentAspectCreateInfo
{
- VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = 0,
- const vk::InputAttachmentAspectReference* pAspectReferences_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassInputAttachmentAspectCreateInfo( aspectReferenceCount_, pAspectReferences_ )
+ VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {},
+ const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT
+ : aspectReferenceCount( aspectReferenceCount_ )
+ , pAspectReferences( pAspectReferences_ )
{}
+ VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) - offsetof( RenderPassInputAttachmentAspectCreateInfo, pNext ) );
+ return *this;
+ }
+
RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassInputAttachmentAspectCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RenderPassInputAttachmentAspectCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>(&rhs);
return *this;
}
@@ -60168,7 +56591,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const vk::InputAttachmentAspectReference* pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
{
pAspectReferences = pAspectReferences_;
return *this;
@@ -60197,73 +56620,45 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::RenderPassInputAttachmentAspectCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+ const void* pNext = {};
+ uint32_t aspectReferenceCount = {};
+ const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences = {};
};
static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct RenderPassMultiviewCreateInfo
{
- struct RenderPassMultiviewCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0,
- const uint32_t* pViewMasks_ = nullptr,
- uint32_t dependencyCount_ = 0,
- const int32_t* pViewOffsets_ = nullptr,
- uint32_t correlationMaskCount_ = 0,
- const uint32_t* pCorrelationMasks_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : subpassCount( subpassCount_ )
- , pViewMasks( pViewMasks_ )
- , dependencyCount( dependencyCount_ )
- , pViewOffsets( pViewOffsets_ )
- , correlationMaskCount( correlationMaskCount_ )
- , pCorrelationMasks( pCorrelationMasks_ )
- {}
-
- RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>(this) = rhs;
- }
-
- RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
- const void* pNext = nullptr;
- uint32_t subpassCount;
- const uint32_t* pViewMasks;
- uint32_t dependencyCount;
- const int32_t* pViewOffsets;
- uint32_t correlationMaskCount;
- const uint32_t* pCorrelationMasks;
- };
- static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct RenderPassMultiviewCreateInfo : public layout::RenderPassMultiviewCreateInfo
- {
- VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = 0,
- const uint32_t* pViewMasks_ = nullptr,
- uint32_t dependencyCount_ = 0,
- const int32_t* pViewOffsets_ = nullptr,
- uint32_t correlationMaskCount_ = 0,
- const uint32_t* pCorrelationMasks_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassMultiviewCreateInfo( subpassCount_, pViewMasks_, dependencyCount_, pViewOffsets_, correlationMaskCount_, pCorrelationMasks_ )
+ VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {},
+ const uint32_t* pViewMasks_ = {},
+ uint32_t dependencyCount_ = {},
+ const int32_t* pViewOffsets_ = {},
+ uint32_t correlationMaskCount_ = {},
+ const uint32_t* pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT
+ : subpassCount( subpassCount_ )
+ , pViewMasks( pViewMasks_ )
+ , dependencyCount( dependencyCount_ )
+ , pViewOffsets( pViewOffsets_ )
+ , correlationMaskCount( correlationMaskCount_ )
+ , pCorrelationMasks( pCorrelationMasks_ )
{}
+ VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo & operator=( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) - offsetof( RenderPassMultiviewCreateInfo, pNext ) );
+ return *this;
+ }
+
RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassMultiviewCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RenderPassMultiviewCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>(&rhs);
return *this;
}
@@ -60336,28 +56731,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::RenderPassMultiviewCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
+ const void* pNext = {};
+ uint32_t subpassCount = {};
+ const uint32_t* pViewMasks = {};
+ uint32_t dependencyCount = {};
+ const int32_t* pViewOffsets = {};
+ uint32_t correlationMaskCount = {};
+ const uint32_t* pCorrelationMasks = {};
};
static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
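
RenderPassMultiviewCreateInfo still extends the version-1 create info through pNext; RenderPassCreateInfo2 instead carries the per-subpass viewMask inline in SubpassDescription2. A sketch of the version-1 chain:

    uint32_t viewMasks[] = { 3u };  // the single subpass renders to views 0 and 1
    vk::RenderPassMultiviewCreateInfo multiviewInfo;
    multiviewInfo.setSubpassCount( 1 )
                 .setPViewMasks( viewMasks );

    vk::RenderPassCreateInfo renderPassInfo;  // version-1 struct
    renderPassInfo.setPNext( &multiviewInfo );
    // ...fill attachments and subpasses as usual, then device.createRenderPass( renderPassInfo )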
struct SubpassSampleLocationsEXT
{
- VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = 0,
- vk::SampleLocationsInfoEXT sampleLocationsInfo_ = vk::SampleLocationsInfoEXT() ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {},
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
: subpassIndex( subpassIndex_ )
, sampleLocationsInfo( sampleLocationsInfo_ )
{}
SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSubpassSampleLocationsEXT*>(this) = rhs;
+ *this = rhs;
}
SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSubpassSampleLocationsEXT*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>(&rhs);
return *this;
}
@@ -60367,7 +56769,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SubpassSampleLocationsEXT & setSampleLocationsInfo( vk::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+ SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
{
sampleLocationsInfo = sampleLocationsInfo_;
return *this;
@@ -60395,65 +56797,38 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t subpassIndex;
- vk::SampleLocationsInfoEXT sampleLocationsInfo;
+ uint32_t subpassIndex = {};
+ VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
};
static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct RenderPassSampleLocationsBeginInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0,
- const vk::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr,
- uint32_t postSubpassSampleLocationsCount_ = 0,
- const vk::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
- , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
- , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
- , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
- {}
-
- RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>(this) = rhs;
- }
-
- RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
- const void* pNext = nullptr;
- uint32_t attachmentInitialSampleLocationsCount;
- const vk::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations;
- uint32_t postSubpassSampleLocationsCount;
- const vk::SubpassSampleLocationsEXT* pPostSubpassSampleLocations;
- };
- static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct RenderPassSampleLocationsBeginInfoEXT : public layout::RenderPassSampleLocationsBeginInfoEXT
+ struct RenderPassSampleLocationsBeginInfoEXT
{
- VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = 0,
- const vk::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = nullptr,
- uint32_t postSubpassSampleLocationsCount_ = 0,
- const vk::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassSampleLocationsBeginInfoEXT( attachmentInitialSampleLocationsCount_, pAttachmentInitialSampleLocations_, postSubpassSampleLocationsCount_, pPostSubpassSampleLocations_ )
+ VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {},
+ uint32_t postSubpassSampleLocationsCount_ = {},
+ const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
+ : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
+ , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
+ , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
+ , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
{}
+ VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT & operator=( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) - offsetof( RenderPassSampleLocationsBeginInfoEXT, pNext ) );
+ return *this;
+ }
+
RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::RenderPassSampleLocationsBeginInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::RenderPassSampleLocationsBeginInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>(&rhs);
return *this;
}
@@ -60469,7 +56844,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const vk::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
return *this;
@@ -60481,7 +56856,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const vk::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+ RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
{
pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
return *this;
@@ -60512,113 +56887,67 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::RenderPassSampleLocationsBeginInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+ const void* pNext = {};
+ uint32_t attachmentInitialSampleLocationsCount = {};
+ const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations = {};
+ uint32_t postSubpassSampleLocationsCount = {};
+ const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations = {};
};
static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
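
A hedged sketch of chaining the begin-info extension struct, assuming VK_EXT_sample_locations is enabled and using only the setters generated above:

    vk::SampleLocationEXT locations[2] = { { 0.25f, 0.25f }, { 0.75f, 0.75f } };

    vk::SampleLocationsInfoEXT locationsInfo;
    locationsInfo.setSampleLocationsPerPixel( vk::SampleCountFlagBits::e2 )
                 .setSampleLocationGridSize( vk::Extent2D( 1, 1 ) )
                 .setSampleLocationsCount( 2 )
                 .setPSampleLocations( locations );

    vk::SubpassSampleLocationsEXT subpassLocations;
    subpassLocations.setSubpassIndex( 0 )
                    .setSampleLocationsInfo( locationsInfo );

    vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBegin;
    sampleLocationsBegin.setPostSubpassSampleLocationsCount( 1 )
                        .setPPostSubpassSampleLocations( &subpassLocations );

    vk::RenderPassBeginInfo beginInfo;
    beginInfo.setPNext( &sampleLocationsBegin );  // chained ahead of beginning the render pass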
- namespace layout
- {
- struct SamplerCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR SamplerCreateInfo( vk::SamplerCreateFlags flags_ = vk::SamplerCreateFlags(),
- vk::Filter magFilter_ = vk::Filter::eNearest,
- vk::Filter minFilter_ = vk::Filter::eNearest,
- vk::SamplerMipmapMode mipmapMode_ = vk::SamplerMipmapMode::eNearest,
- vk::SamplerAddressMode addressModeU_ = vk::SamplerAddressMode::eRepeat,
- vk::SamplerAddressMode addressModeV_ = vk::SamplerAddressMode::eRepeat,
- vk::SamplerAddressMode addressModeW_ = vk::SamplerAddressMode::eRepeat,
- float mipLodBias_ = 0,
- vk::Bool32 anisotropyEnable_ = 0,
- float maxAnisotropy_ = 0,
- vk::Bool32 compareEnable_ = 0,
- vk::CompareOp compareOp_ = vk::CompareOp::eNever,
- float minLod_ = 0,
- float maxLod_ = 0,
- vk::BorderColor borderColor_ = vk::BorderColor::eFloatTransparentBlack,
- vk::Bool32 unnormalizedCoordinates_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , magFilter( magFilter_ )
- , minFilter( minFilter_ )
- , mipmapMode( mipmapMode_ )
- , addressModeU( addressModeU_ )
- , addressModeV( addressModeV_ )
- , addressModeW( addressModeW_ )
- , mipLodBias( mipLodBias_ )
- , anisotropyEnable( anisotropyEnable_ )
- , maxAnisotropy( maxAnisotropy_ )
- , compareEnable( compareEnable_ )
- , compareOp( compareOp_ )
- , minLod( minLod_ )
- , maxLod( maxLod_ )
- , borderColor( borderColor_ )
- , unnormalizedCoordinates( unnormalizedCoordinates_ )
- {}
-
- SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerCreateInfo*>(this) = rhs;
- }
-
- SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSamplerCreateInfo;
- const void* pNext = nullptr;
- vk::SamplerCreateFlags flags;
- vk::Filter magFilter;
- vk::Filter minFilter;
- vk::SamplerMipmapMode mipmapMode;
- vk::SamplerAddressMode addressModeU;
- vk::SamplerAddressMode addressModeV;
- vk::SamplerAddressMode addressModeW;
- float mipLodBias;
- vk::Bool32 anisotropyEnable;
- float maxAnisotropy;
- vk::Bool32 compareEnable;
- vk::CompareOp compareOp;
- float minLod;
- float maxLod;
- vk::BorderColor borderColor;
- vk::Bool32 unnormalizedCoordinates;
- };
- static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct SamplerCreateInfo : public layout::SamplerCreateInfo
- {
- VULKAN_HPP_CONSTEXPR SamplerCreateInfo( vk::SamplerCreateFlags flags_ = vk::SamplerCreateFlags(),
- vk::Filter magFilter_ = vk::Filter::eNearest,
- vk::Filter minFilter_ = vk::Filter::eNearest,
- vk::SamplerMipmapMode mipmapMode_ = vk::SamplerMipmapMode::eNearest,
- vk::SamplerAddressMode addressModeU_ = vk::SamplerAddressMode::eRepeat,
- vk::SamplerAddressMode addressModeV_ = vk::SamplerAddressMode::eRepeat,
- vk::SamplerAddressMode addressModeW_ = vk::SamplerAddressMode::eRepeat,
- float mipLodBias_ = 0,
- vk::Bool32 anisotropyEnable_ = 0,
- float maxAnisotropy_ = 0,
- vk::Bool32 compareEnable_ = 0,
- vk::CompareOp compareOp_ = vk::CompareOp::eNever,
- float minLod_ = 0,
- float maxLod_ = 0,
- vk::BorderColor borderColor_ = vk::BorderColor::eFloatTransparentBlack,
- vk::Bool32 unnormalizedCoordinates_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerCreateInfo( flags_, magFilter_, minFilter_, mipmapMode_, addressModeU_, addressModeV_, addressModeW_, mipLodBias_, anisotropyEnable_, maxAnisotropy_, compareEnable_, compareOp_, minLod_, maxLod_, borderColor_, unnormalizedCoordinates_ )
+ struct SamplerCreateInfo
+ {
+ VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {},
+ VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+ VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+ VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
+ float mipLodBias_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {},
+ float maxAnisotropy_ = {},
+ VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {},
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+ float minLod_ = {},
+ float maxLod_ = {},
+ VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
+ VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , magFilter( magFilter_ )
+ , minFilter( minFilter_ )
+ , mipmapMode( mipmapMode_ )
+ , addressModeU( addressModeU_ )
+ , addressModeV( addressModeV_ )
+ , addressModeW( addressModeW_ )
+ , mipLodBias( mipLodBias_ )
+ , anisotropyEnable( anisotropyEnable_ )
+ , maxAnisotropy( maxAnisotropy_ )
+ , compareEnable( compareEnable_ )
+ , compareOp( compareOp_ )
+ , minLod( minLod_ )
+ , maxLod( maxLod_ )
+ , borderColor( borderColor_ )
+ , unnormalizedCoordinates( unnormalizedCoordinates_ )
{}
+ VULKAN_HPP_NAMESPACE::SamplerCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) - offsetof( SamplerCreateInfo, pNext ) );
+ return *this;
+ }
+
SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SamplerCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>(&rhs);
return *this;
}
@@ -60628,43 +56957,43 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SamplerCreateInfo & setFlags( vk::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- SamplerCreateInfo & setMagFilter( vk::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
{
magFilter = magFilter_;
return *this;
}
- SamplerCreateInfo & setMinFilter( vk::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
{
minFilter = minFilter_;
return *this;
}
- SamplerCreateInfo & setMipmapMode( vk::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
{
mipmapMode = mipmapMode_;
return *this;
}
- SamplerCreateInfo & setAddressModeU( vk::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
{
addressModeU = addressModeU_;
return *this;
}
- SamplerCreateInfo & setAddressModeV( vk::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
{
addressModeV = addressModeV_;
return *this;
}
- SamplerCreateInfo & setAddressModeW( vk::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
{
addressModeW = addressModeW_;
return *this;
@@ -60676,7 +57005,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SamplerCreateInfo & setAnisotropyEnable( vk::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
{
anisotropyEnable = anisotropyEnable_;
return *this;
@@ -60688,13 +57017,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SamplerCreateInfo & setCompareEnable( vk::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
{
compareEnable = compareEnable_;
return *this;
}
- SamplerCreateInfo & setCompareOp( vk::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
{
compareOp = compareOp_;
return *this;
@@ -60712,13 +57041,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SamplerCreateInfo & setBorderColor( vk::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
{
borderColor = borderColor_;
return *this;
}
- SamplerCreateInfo & setUnnormalizedCoordinates( vk::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+ SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
{
unnormalizedCoordinates = unnormalizedCoordinates_;
return *this;
@@ -60761,165 +57090,128 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SamplerCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
+ VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+ float mipLodBias = {};
+ VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
+ float maxAnisotropy = {};
+ VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
+ VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+ float minLod = {};
+ float maxLod = {};
+ VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
+ VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
};
static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
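
The flattened SamplerCreateInfo keeps its non-zero enum defaults (eNearest filters, eRepeat addressing) as member initializers, so only the fields that differ need setting. A sketch, assuming a vk::Device named device and that samplerAnisotropy was enabled at device creation:

    vk::SamplerCreateInfo samplerInfo;
    samplerInfo.setMagFilter( vk::Filter::eLinear )
               .setMinFilter( vk::Filter::eLinear )
               .setMipmapMode( vk::SamplerMipmapMode::eLinear )
               .setAnisotropyEnable( VK_TRUE )
               .setMaxAnisotropy( 16.0f );

    vk::Sampler sampler = device.createSampler( samplerInfo );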
- namespace layout
- {
- struct SamplerReductionModeCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfoEXT( vk::SamplerReductionModeEXT reductionMode_ = vk::SamplerReductionModeEXT::eWeightedAverage ) VULKAN_HPP_NOEXCEPT
- : reductionMode( reductionMode_ )
- {}
-
- SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerReductionModeCreateInfoEXT*>(this) = rhs;
- }
-
- SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerReductionModeCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSamplerReductionModeCreateInfoEXT;
- const void* pNext = nullptr;
- vk::SamplerReductionModeEXT reductionMode;
- };
- static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct SamplerReductionModeCreateInfoEXT : public layout::SamplerReductionModeCreateInfoEXT
+ struct SamplerReductionModeCreateInfo
{
- VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfoEXT( vk::SamplerReductionModeEXT reductionMode_ = vk::SamplerReductionModeEXT::eWeightedAverage ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerReductionModeCreateInfoEXT( reductionMode_ )
+ VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT
+ : reductionMode( reductionMode_ )
{}
- SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerReductionModeCreateInfoEXT( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) - offsetof( SamplerReductionModeCreateInfo, pNext ) );
+ return *this;
+ }
- SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SamplerReductionModeCreateInfoEXT::operator=(rhs);
+ *this = rhs;
+ }
+
+ SamplerReductionModeCreateInfo& operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>(&rhs);
return *this;
}
- SamplerReductionModeCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SamplerReductionModeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- SamplerReductionModeCreateInfoEXT & setReductionMode( vk::SamplerReductionModeEXT reductionMode_ ) VULKAN_HPP_NOEXCEPT
+ SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
{
reductionMode = reductionMode_;
return *this;
}
- operator VkSamplerReductionModeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSamplerReductionModeCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSamplerReductionModeCreateInfoEXT*>( this );
+ return *reinterpret_cast<const VkSamplerReductionModeCreateInfo*>( this );
}
- operator VkSamplerReductionModeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+ operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSamplerReductionModeCreateInfoEXT*>( this );
+ return *reinterpret_cast<VkSamplerReductionModeCreateInfo*>( this );
}
- bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( reductionMode == rhs.reductionMode );
}
- bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SamplerReductionModeCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
};
- static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SamplerReductionModeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct SamplerYcbcrConversionCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( vk::Format format_ = vk::Format::eUndefined,
- vk::SamplerYcbcrModelConversion ycbcrModel_ = vk::SamplerYcbcrModelConversion::eRgbIdentity,
- vk::SamplerYcbcrRange ycbcrRange_ = vk::SamplerYcbcrRange::eItuFull,
- vk::ComponentMapping components_ = vk::ComponentMapping(),
- vk::ChromaLocation xChromaOffset_ = vk::ChromaLocation::eCositedEven,
- vk::ChromaLocation yChromaOffset_ = vk::ChromaLocation::eCositedEven,
- vk::Filter chromaFilter_ = vk::Filter::eNearest,
- vk::Bool32 forceExplicitReconstruction_ = 0 ) VULKAN_HPP_NOEXCEPT
- : format( format_ )
- , ycbcrModel( ycbcrModel_ )
- , ycbcrRange( ycbcrRange_ )
- , components( components_ )
- , xChromaOffset( xChromaOffset_ )
- , yChromaOffset( yChromaOffset_ )
- , chromaFilter( chromaFilter_ )
- , forceExplicitReconstruction( forceExplicitReconstruction_ )
- {}
-
- SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>(this) = rhs;
- }
-
- SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>(this) = rhs;
- return *this;
- }
+ static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
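  // [editor] usage sketch (not part of the generated header): a minimal example of
  // chaining the promoted SamplerReductionModeCreateInfo into a SamplerCreateInfo
  // via pNext; `device` is an assumed, valid vk::Device with Vulkan 1.2 (or
  // VK_EXT_sampler_filter_minmax) available.
  vk::SamplerReductionModeCreateInfo reductionInfo( vk::SamplerReductionMode::eMax );
  vk::SamplerCreateInfo samplerInfo;
  samplerInfo.magFilter = vk::Filter::eLinear;
  samplerInfo.minFilter = vk::Filter::eLinear;
  samplerInfo.pNext     = &reductionInfo;  // extends the create-info chain
  vk::Sampler sampler   = device.createSampler( samplerInfo );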
- public:
- vk::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
- const void* pNext = nullptr;
- vk::Format format;
- vk::SamplerYcbcrModelConversion ycbcrModel;
- vk::SamplerYcbcrRange ycbcrRange;
- vk::ComponentMapping components;
- vk::ChromaLocation xChromaOffset;
- vk::ChromaLocation yChromaOffset;
- vk::Filter chromaFilter;
- vk::Bool32 forceExplicitReconstruction;
- };
- static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct SamplerYcbcrConversionCreateInfo : public layout::SamplerYcbcrConversionCreateInfo
+ struct SamplerYcbcrConversionCreateInfo
{
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( vk::Format format_ = vk::Format::eUndefined,
- vk::SamplerYcbcrModelConversion ycbcrModel_ = vk::SamplerYcbcrModelConversion::eRgbIdentity,
- vk::SamplerYcbcrRange ycbcrRange_ = vk::SamplerYcbcrRange::eItuFull,
- vk::ComponentMapping components_ = vk::ComponentMapping(),
- vk::ChromaLocation xChromaOffset_ = vk::ChromaLocation::eCositedEven,
- vk::ChromaLocation yChromaOffset_ = vk::ChromaLocation::eCositedEven,
- vk::Filter chromaFilter_ = vk::Filter::eNearest,
- vk::Bool32 forceExplicitReconstruction_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerYcbcrConversionCreateInfo( format_, ycbcrModel_, ycbcrRange_, components_, xChromaOffset_, yChromaOffset_, chromaFilter_, forceExplicitReconstruction_ )
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+ VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+ VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+ VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+ VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT
+ : format( format_ )
+ , ycbcrModel( ycbcrModel_ )
+ , ycbcrRange( ycbcrRange_ )
+ , components( components_ )
+ , xChromaOffset( xChromaOffset_ )
+ , yChromaOffset( yChromaOffset_ )
+ , chromaFilter( chromaFilter_ )
+ , forceExplicitReconstruction( forceExplicitReconstruction_ )
{}
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) - offsetof( SamplerYcbcrConversionCreateInfo, pNext ) );
+ return *this;
+ }
+
SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerYcbcrConversionCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SamplerYcbcrConversionCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>(&rhs);
return *this;
}
@@ -60929,49 +57221,49 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SamplerYcbcrConversionCreateInfo & setFormat( vk::Format format_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
{
format = format_;
return *this;
}
- SamplerYcbcrConversionCreateInfo & setYcbcrModel( vk::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrModel = ycbcrModel_;
return *this;
}
- SamplerYcbcrConversionCreateInfo & setYcbcrRange( vk::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
{
ycbcrRange = ycbcrRange_;
return *this;
}
- SamplerYcbcrConversionCreateInfo & setComponents( vk::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
{
components = components_;
return *this;
}
- SamplerYcbcrConversionCreateInfo & setXChromaOffset( vk::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
{
xChromaOffset = xChromaOffset_;
return *this;
}
- SamplerYcbcrConversionCreateInfo & setYChromaOffset( vk::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
{
yChromaOffset = yChromaOffset_;
return *this;
}
- SamplerYcbcrConversionCreateInfo & setChromaFilter( vk::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
{
chromaFilter = chromaFilter_;
return *this;
}
- SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( vk::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
{
forceExplicitReconstruction = forceExplicitReconstruction_;
return *this;
@@ -61006,52 +57298,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SamplerYcbcrConversionCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+ VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+ VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+ VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+ VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
};
static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
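  // [editor] usage sketch (not part of the generated header): creating a Y'CbCr
  // conversion for a two-plane 4:2:0 format; assumes `device` is a valid
  // vk::Device with the samplerYcbcrConversion feature enabled (and linear
  // chroma filtering supported for this format).
  vk::SamplerYcbcrConversionCreateInfo conversionInfo(
      vk::Format::eG8B8R82Plane420Unorm,
      vk::SamplerYcbcrModelConversion::eYcbcr709,
      vk::SamplerYcbcrRange::eItuNarrow,
      {},                                  // identity component swizzle
      vk::ChromaLocation::eCositedEven,
      vk::ChromaLocation::eCositedEven,
      vk::Filter::eLinear,
      VK_FALSE );
  vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );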
- namespace layout
+ struct SamplerYcbcrConversionImageFormatProperties
{
- struct SamplerYcbcrConversionImageFormatProperties
- {
- protected:
- SamplerYcbcrConversionImageFormatProperties() VULKAN_HPP_NOEXCEPT
- {}
-
- SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>(this) = rhs;
- }
-
- SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
- void* pNext = nullptr;
- uint32_t combinedImageSamplerDescriptorCount;
- };
- static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "layout struct and wrapper have different size!" );
- }
-
- struct SamplerYcbcrConversionImageFormatProperties : public layout::SamplerYcbcrConversionImageFormatProperties
- {
- SamplerYcbcrConversionImageFormatProperties() VULKAN_HPP_NOEXCEPT
- : layout::SamplerYcbcrConversionImageFormatProperties()
+ SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+ : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
{}
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) - offsetof( SamplerYcbcrConversionImageFormatProperties, pNext ) );
+ return *this;
+ }
+
SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerYcbcrConversionImageFormatProperties( rhs )
- {}
+ {
+ *this = rhs;
+ }
SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SamplerYcbcrConversionImageFormatProperties::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>(&rhs);
return *this;
}
@@ -61077,53 +57358,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SamplerYcbcrConversionImageFormatProperties::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+ void* pNext = {};
+ uint32_t combinedImageSamplerDescriptorCount = {};
};
static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct SamplerYcbcrConversionInfo
{
- struct SamplerYcbcrConversionInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( vk::SamplerYcbcrConversion conversion_ = vk::SamplerYcbcrConversion() ) VULKAN_HPP_NOEXCEPT
- : conversion( conversion_ )
- {}
-
- SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerYcbcrConversionInfo*>(this) = rhs;
- }
-
- SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSamplerYcbcrConversionInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
- const void* pNext = nullptr;
- vk::SamplerYcbcrConversion conversion;
- };
- static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct SamplerYcbcrConversionInfo : public layout::SamplerYcbcrConversionInfo
- {
- VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( vk::SamplerYcbcrConversion conversion_ = vk::SamplerYcbcrConversion() ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerYcbcrConversionInfo( conversion_ )
+ VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT
+ : conversion( conversion_ )
{}
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo & operator=( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) - offsetof( SamplerYcbcrConversionInfo, pNext ) );
+ return *this;
+ }
+
SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SamplerYcbcrConversionInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SamplerYcbcrConversionInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>(&rhs);
return *this;
}
@@ -61133,7 +57395,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SamplerYcbcrConversionInfo & setConversion( vk::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
+ SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
{
conversion = conversion_;
return *this;
@@ -61161,53 +57423,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SamplerYcbcrConversionInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
};
static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
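  // [editor] usage sketch (not part of the generated header): attaching a
  // previously created conversion (see the sketch above) to a sampler through
  // the pNext chain.
  vk::SamplerYcbcrConversionInfo conversionAttach( conversion );
  vk::SamplerCreateInfo ycbcrSamplerInfo;
  ycbcrSamplerInfo.pNext   = &conversionAttach;
  vk::Sampler ycbcrSampler = device.createSampler( ycbcrSamplerInfo );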
- namespace layout
+ struct SemaphoreCreateInfo
{
- struct SemaphoreCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( vk::SemaphoreCreateFlags flags_ = vk::SemaphoreCreateFlags() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- {}
-
- SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreCreateInfo*>(this) = rhs;
- }
-
- SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSemaphoreCreateInfo;
- const void* pNext = nullptr;
- vk::SemaphoreCreateFlags flags;
- };
- static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct SemaphoreCreateInfo : public layout::SemaphoreCreateInfo
- {
- VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( vk::SemaphoreCreateFlags flags_ = vk::SemaphoreCreateFlags() ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreCreateInfo( flags_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
{}
+ VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) - offsetof( SemaphoreCreateInfo, pNext ) );
+ return *this;
+ }
+
SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SemaphoreCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>(&rhs);
return *this;
}
@@ -61217,7 +57460,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SemaphoreCreateInfo & setFlags( vk::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -61245,57 +57488,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SemaphoreCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
};
static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
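  // [editor] usage sketch (not part of the generated header): SemaphoreCreateFlags
  // is reserved, so a default-constructed create info yields a plain binary
  // semaphore; `device` is an assumed, valid vk::Device.
  vk::Semaphore binarySemaphore = device.createSemaphore( vk::SemaphoreCreateInfo() );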
- namespace layout
+ struct SemaphoreGetFdInfoKHR
{
- struct SemaphoreGetFdInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , handleType( handleType_ )
- {}
-
- SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>(this) = rhs;
- }
-
- SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
- const void* pNext = nullptr;
- vk::Semaphore semaphore;
- vk::ExternalSemaphoreHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SemaphoreGetFdInfoKHR : public layout::SemaphoreGetFdInfoKHR
- {
- VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreGetFdInfoKHR( semaphore_, handleType_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ )
+ , handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) - offsetof( SemaphoreGetFdInfoKHR, pNext ) );
+ return *this;
+ }
+
SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreGetFdInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SemaphoreGetFdInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>(&rhs);
return *this;
}
@@ -61305,13 +57527,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SemaphoreGetFdInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- SemaphoreGetFdInfoKHR & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -61332,7 +57554,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -61340,59 +57562,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SemaphoreGetFdInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
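  // [editor] usage sketch (not part of the generated header): exporting a
  // semaphore payload as a POSIX fd; assumes VK_KHR_external_semaphore_fd is
  // enabled and `binarySemaphore` was created with an exportable opaque-fd
  // handle type (an ExportSemaphoreCreateInfo chain the sketch above omits).
  // Ownership of the returned fd passes to the caller.
  vk::SemaphoreGetFdInfoKHR getFdInfo( binarySemaphore,
                                       vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  int fd = device.getSemaphoreFdKHR( getFdInfo );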
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct SemaphoreGetWin32HandleInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , handleType( handleType_ )
- {}
-
- SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>(this) = rhs;
- }
-
- SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
- const void* pNext = nullptr;
- vk::Semaphore semaphore;
- vk::ExternalSemaphoreHandleTypeFlagBits handleType;
- };
- static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SemaphoreGetWin32HandleInfoKHR : public layout::SemaphoreGetWin32HandleInfoKHR
+ struct SemaphoreGetWin32HandleInfoKHR
{
- VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- vk::ExternalSemaphoreHandleTypeFlagBits handleType_ = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreGetWin32HandleInfoKHR( semaphore_, handleType_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ )
+ , handleType( handleType_ )
{}
+ VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) - offsetof( SemaphoreGetWin32HandleInfoKHR, pNext ) );
+ return *this;
+ }
+
SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreGetWin32HandleInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SemaphoreGetWin32HandleInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>(&rhs);
return *this;
}
@@ -61402,13 +57604,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SemaphoreGetWin32HandleInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- SemaphoreGetWin32HandleInfoKHR & setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
@@ -61429,7 +57631,7 @@ namespace VULKAN_HPP_NAMESPACE
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( semaphore == rhs.semaphore )
- && vk::operator==( handleType, rhs.handleType );
+ && ( handleType == rhs.handleType );
}
bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
@@ -61437,90 +57639,70 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SemaphoreGetWin32HandleInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
- namespace layout
+ struct SemaphoreSignalInfo
{
- struct SemaphoreSignalInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SemaphoreSignalInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- uint64_t value_ = 0 ) VULKAN_HPP_NOEXCEPT
- : semaphore( semaphore_ )
- , value( value_ )
- {}
-
- SemaphoreSignalInfoKHR( VkSemaphoreSignalInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreSignalInfoKHR*>(this) = rhs;
- }
-
- SemaphoreSignalInfoKHR& operator=( VkSemaphoreSignalInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreSignalInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSemaphoreSignalInfoKHR;
- const void* pNext = nullptr;
- vk::Semaphore semaphore;
- uint64_t value;
- };
- static_assert( sizeof( SemaphoreSignalInfoKHR ) == sizeof( VkSemaphoreSignalInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SemaphoreSignalInfoKHR : public layout::SemaphoreSignalInfoKHR
- {
- VULKAN_HPP_CONSTEXPR SemaphoreSignalInfoKHR( vk::Semaphore semaphore_ = vk::Semaphore(),
- uint64_t value_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreSignalInfoKHR( semaphore_, value_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+ uint64_t value_ = {} ) VULKAN_HPP_NOEXCEPT
+ : semaphore( semaphore_ )
+ , value( value_ )
{}
- SemaphoreSignalInfoKHR( VkSemaphoreSignalInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreSignalInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) - offsetof( SemaphoreSignalInfo, pNext ) );
+ return *this;
+ }
+
+ SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- SemaphoreSignalInfoKHR& operator=( VkSemaphoreSignalInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo& operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SemaphoreSignalInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>(&rhs);
return *this;
}
- SemaphoreSignalInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- SemaphoreSignalInfoKHR & setSemaphore( vk::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
{
semaphore = semaphore_;
return *this;
}
- SemaphoreSignalInfoKHR & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
{
value = value_;
return *this;
}
- operator VkSemaphoreSignalInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSemaphoreSignalInfoKHR*>( this );
+ return *reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
}
- operator VkSemaphoreSignalInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSemaphoreSignalInfoKHR*>( this );
+ return *reinterpret_cast<VkSemaphoreSignalInfo*>( this );
}
- bool operator==( SemaphoreSignalInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -61528,94 +57710,74 @@ namespace VULKAN_HPP_NAMESPACE
&& ( value == rhs.value );
}
- bool operator!=( SemaphoreSignalInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SemaphoreSignalInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+ uint64_t value = {};
};
- static_assert( sizeof( SemaphoreSignalInfoKHR ) == sizeof( VkSemaphoreSignalInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SemaphoreSignalInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct SemaphoreTypeCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfoKHR( vk::SemaphoreTypeKHR semaphoreType_ = vk::SemaphoreTypeKHR::eBinary,
- uint64_t initialValue_ = 0 ) VULKAN_HPP_NOEXCEPT
- : semaphoreType( semaphoreType_ )
- , initialValue( initialValue_ )
- {}
-
- SemaphoreTypeCreateInfoKHR( VkSemaphoreTypeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreTypeCreateInfoKHR*>(this) = rhs;
- }
+ static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
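  // [editor] usage sketch (not part of the generated header): signalling a
  // timeline semaphore from the host (Vulkan 1.2 core); `timelineSemaphore` is
  // assumed to be a timeline semaphore, e.g. created as in the
  // SemaphoreTypeCreateInfo sketch further below.
  vk::SemaphoreSignalInfo signalInfo( timelineSemaphore, 2 );  // set the counter to 2
  device.signalSemaphore( signalInfo );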
- SemaphoreTypeCreateInfoKHR& operator=( VkSemaphoreTypeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreTypeCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSemaphoreTypeCreateInfoKHR;
- const void* pNext = nullptr;
- vk::SemaphoreTypeKHR semaphoreType;
- uint64_t initialValue;
- };
- static_assert( sizeof( SemaphoreTypeCreateInfoKHR ) == sizeof( VkSemaphoreTypeCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SemaphoreTypeCreateInfoKHR : public layout::SemaphoreTypeCreateInfoKHR
+ struct SemaphoreTypeCreateInfo
{
- VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfoKHR( vk::SemaphoreTypeKHR semaphoreType_ = vk::SemaphoreTypeKHR::eBinary,
- uint64_t initialValue_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreTypeCreateInfoKHR( semaphoreType_, initialValue_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary,
+ uint64_t initialValue_ = {} ) VULKAN_HPP_NOEXCEPT
+ : semaphoreType( semaphoreType_ )
+ , initialValue( initialValue_ )
{}
- SemaphoreTypeCreateInfoKHR( VkSemaphoreTypeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreTypeCreateInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) - offsetof( SemaphoreTypeCreateInfo, pNext ) );
+ return *this;
+ }
- SemaphoreTypeCreateInfoKHR& operator=( VkSemaphoreTypeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SemaphoreTypeCreateInfoKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ SemaphoreTypeCreateInfo& operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>(&rhs);
return *this;
}
- SemaphoreTypeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreTypeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- SemaphoreTypeCreateInfoKHR & setSemaphoreType( vk::SemaphoreTypeKHR semaphoreType_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreType = semaphoreType_;
return *this;
}
- SemaphoreTypeCreateInfoKHR & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
{
initialValue = initialValue_;
return *this;
}
- operator VkSemaphoreTypeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreTypeCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSemaphoreTypeCreateInfoKHR*>( this );
+ return *reinterpret_cast<const VkSemaphoreTypeCreateInfo*>( this );
}
- operator VkSemaphoreTypeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSemaphoreTypeCreateInfoKHR*>( this );
+ return *reinterpret_cast<VkSemaphoreTypeCreateInfo*>( this );
}
- bool operator==( SemaphoreTypeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -61623,114 +57785,90 @@ namespace VULKAN_HPP_NAMESPACE
&& ( initialValue == rhs.initialValue );
}
- bool operator!=( SemaphoreTypeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SemaphoreTypeCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
+ uint64_t initialValue = {};
};
- static_assert( sizeof( SemaphoreTypeCreateInfoKHR ) == sizeof( VkSemaphoreTypeCreateInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SemaphoreTypeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct SemaphoreWaitInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SemaphoreWaitInfoKHR( vk::SemaphoreWaitFlagsKHR flags_ = vk::SemaphoreWaitFlagsKHR(),
- uint32_t semaphoreCount_ = 0,
- const vk::Semaphore* pSemaphores_ = nullptr,
- const uint64_t* pValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , semaphoreCount( semaphoreCount_ )
- , pSemaphores( pSemaphores_ )
- , pValues( pValues_ )
- {}
-
- SemaphoreWaitInfoKHR( VkSemaphoreWaitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreWaitInfoKHR*>(this) = rhs;
- }
-
- SemaphoreWaitInfoKHR& operator=( VkSemaphoreWaitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSemaphoreWaitInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSemaphoreWaitInfoKHR;
- const void* pNext = nullptr;
- vk::SemaphoreWaitFlagsKHR flags;
- uint32_t semaphoreCount;
- const vk::Semaphore* pSemaphores;
- const uint64_t* pValues;
- };
- static_assert( sizeof( SemaphoreWaitInfoKHR ) == sizeof( VkSemaphoreWaitInfoKHR ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
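  // [editor] usage sketch (not part of the generated header): creating a
  // timeline semaphore by chaining the promoted SemaphoreTypeCreateInfo into
  // SemaphoreCreateInfo; requires the Vulkan 1.2 timelineSemaphore feature.
  vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, 0 /*initialValue*/ );
  vk::SemaphoreCreateInfo timelineCreateInfo;
  timelineCreateInfo.pNext        = &typeInfo;
  vk::Semaphore timelineSemaphore = device.createSemaphore( timelineCreateInfo );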
- struct SemaphoreWaitInfoKHR : public layout::SemaphoreWaitInfoKHR
+ struct SemaphoreWaitInfo
{
- VULKAN_HPP_CONSTEXPR SemaphoreWaitInfoKHR( vk::SemaphoreWaitFlagsKHR flags_ = vk::SemaphoreWaitFlagsKHR(),
- uint32_t semaphoreCount_ = 0,
- const vk::Semaphore* pSemaphores_ = nullptr,
- const uint64_t* pValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreWaitInfoKHR( flags_, semaphoreCount_, pSemaphores_, pValues_ )
+ VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {},
+ uint32_t semaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {},
+ const uint64_t* pValues_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , semaphoreCount( semaphoreCount_ )
+ , pSemaphores( pSemaphores_ )
+ , pValues( pValues_ )
{}
- SemaphoreWaitInfoKHR( VkSemaphoreWaitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SemaphoreWaitInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & operator=( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) - offsetof( SemaphoreWaitInfo, pNext ) );
+ return *this;
+ }
- SemaphoreWaitInfoKHR& operator=( VkSemaphoreWaitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SemaphoreWaitInfoKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ SemaphoreWaitInfo& operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>(&rhs);
return *this;
}
- SemaphoreWaitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- SemaphoreWaitInfoKHR & setFlags( vk::SemaphoreWaitFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- SemaphoreWaitInfoKHR & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
{
semaphoreCount = semaphoreCount_;
return *this;
}
- SemaphoreWaitInfoKHR & setPSemaphores( const vk::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSemaphores = pSemaphores_;
return *this;
}
- SemaphoreWaitInfoKHR & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT
+ SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT
{
pValues = pValues_;
return *this;
}
- operator VkSemaphoreWaitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSemaphoreWaitInfoKHR*>( this );
+ return *reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
}
- operator VkSemaphoreWaitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSemaphoreWaitInfoKHR*>( this );
+ return *reinterpret_cast<VkSemaphoreWaitInfo*>( this );
}
- bool operator==( SemaphoreWaitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -61740,66 +57878,46 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pValues == rhs.pValues );
}
- bool operator!=( SemaphoreWaitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SemaphoreWaitInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
+ uint32_t semaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {};
+ const uint64_t* pValues = {};
};
- static_assert( sizeof( SemaphoreWaitInfoKHR ) == sizeof( VkSemaphoreWaitInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SemaphoreWaitInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct ShaderModuleCreateInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( vk::ShaderModuleCreateFlags flags_ = vk::ShaderModuleCreateFlags(),
- size_t codeSize_ = 0,
- const uint32_t* pCode_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , codeSize( codeSize_ )
- , pCode( pCode_ )
- {}
-
- ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkShaderModuleCreateInfo*>(this) = rhs;
- }
-
- ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkShaderModuleCreateInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eShaderModuleCreateInfo;
- const void* pNext = nullptr;
- vk::ShaderModuleCreateFlags flags;
- size_t codeSize;
- const uint32_t* pCode;
- };
- static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
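  // [editor] usage sketch (not part of the generated header): a blocking
  // host-side wait until the timeline counter reaches a value; waitSemaphores
  // returns vk::Result::eTimeout rather than throwing when the timeout (in
  // nanoseconds) expires.
  uint64_t waitValue = 2;
  vk::SemaphoreWaitInfo waitInfo( {}, 1, &timelineSemaphore, &waitValue );
  vk::Result waitResult = device.waitSemaphores( waitInfo, UINT64_MAX );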
- struct ShaderModuleCreateInfo : public layout::ShaderModuleCreateInfo
+ struct ShaderModuleCreateInfo
{
- VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( vk::ShaderModuleCreateFlags flags_ = vk::ShaderModuleCreateFlags(),
- size_t codeSize_ = 0,
- const uint32_t* pCode_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ShaderModuleCreateInfo( flags_, codeSize_, pCode_ )
+ VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {},
+ size_t codeSize_ = {},
+ const uint32_t* pCode_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , codeSize( codeSize_ )
+ , pCode( pCode_ )
{}
+ VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & operator=( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) - offsetof( ShaderModuleCreateInfo, pNext ) );
+ return *this;
+ }
+
ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ShaderModuleCreateInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ShaderModuleCreateInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>(&rhs);
return *this;
}
@@ -61809,7 +57927,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ShaderModuleCreateInfo & setFlags( vk::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+ ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -61851,53 +57969,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ShaderModuleCreateInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
+ size_t codeSize = {};
+ const uint32_t* pCode = {};
};
static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
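  // [editor] usage sketch (not part of the generated header): note the classic
  // pitfall this struct carries over from the C API: codeSize is in *bytes*,
  // while pCode points at 32-bit SPIR-V words. `spirv` is an assumed
  // std::vector<uint32_t> holding a valid module.
  vk::ShaderModuleCreateInfo moduleInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
  vk::ShaderModule shaderModule = device.createShaderModule( moduleInfo );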
- namespace layout
- {
- struct ShaderModuleValidationCacheCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( vk::ValidationCacheEXT validationCache_ = vk::ValidationCacheEXT() ) VULKAN_HPP_NOEXCEPT
- : validationCache( validationCache_ )
- {}
-
- ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>(this) = rhs;
- }
-
- ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
- const void* pNext = nullptr;
- vk::ValidationCacheEXT validationCache;
- };
- static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ShaderModuleValidationCacheCreateInfoEXT : public layout::ShaderModuleValidationCacheCreateInfoEXT
+ struct ShaderModuleValidationCacheCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( vk::ValidationCacheEXT validationCache_ = vk::ValidationCacheEXT() ) VULKAN_HPP_NOEXCEPT
- : layout::ShaderModuleValidationCacheCreateInfoEXT( validationCache_ )
+ VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT
+ : validationCache( validationCache_ )
{}
+ VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) - offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ShaderModuleValidationCacheCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ShaderModuleValidationCacheCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -61907,7 +58008,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( vk::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
+ ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
{
validationCache = validationCache_;
return *this;
@@ -61935,25 +58036,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ShaderModuleValidationCacheCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
};
static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
struct ShaderResourceUsageAMD
{
- ShaderResourceUsageAMD() VULKAN_HPP_NOEXCEPT
+ ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {},
+ uint32_t numUsedSgprs_ = {},
+ uint32_t ldsSizePerLocalWorkGroup_ = {},
+ size_t ldsUsageSizeInBytes_ = {},
+ size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT
+ : numUsedVgprs( numUsedVgprs_ )
+ , numUsedSgprs( numUsedSgprs_ )
+ , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ )
+ , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ )
+ , scratchMemUsageInBytes( scratchMemUsageInBytes_ )
{}
ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkShaderResourceUsageAMD*>(this) = rhs;
+ *this = rhs;
}
ShaderResourceUsageAMD& operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkShaderResourceUsageAMD*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>(&rhs);
return *this;
}
@@ -61982,28 +58094,43 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t numUsedVgprs;
- uint32_t numUsedSgprs;
- uint32_t ldsSizePerLocalWorkGroup;
- size_t ldsUsageSizeInBytes;
- size_t scratchMemUsageInBytes;
+ uint32_t numUsedVgprs = {};
+ uint32_t numUsedSgprs = {};
+ uint32_t ldsSizePerLocalWorkGroup = {};
+ size_t ldsUsageSizeInBytes = {};
+ size_t scratchMemUsageInBytes = {};
};
static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
struct ShaderStatisticsInfoAMD
{
- ShaderStatisticsInfoAMD() VULKAN_HPP_NOEXCEPT
- {}
+ ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {},
+ VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {},
+ uint32_t numPhysicalVgprs_ = {},
+ uint32_t numPhysicalSgprs_ = {},
+ uint32_t numAvailableVgprs_ = {},
+ uint32_t numAvailableSgprs_ = {},
+ std::array<uint32_t,3> const& computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
+ : shaderStageMask( shaderStageMask_ )
+ , resourceUsage( resourceUsage_ )
+ , numPhysicalVgprs( numPhysicalVgprs_ )
+ , numPhysicalSgprs( numPhysicalSgprs_ )
+ , numAvailableVgprs( numAvailableVgprs_ )
+ , numAvailableSgprs( numAvailableSgprs_ )
+ , computeWorkGroupSize{}
+ {
+ VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( computeWorkGroupSize, computeWorkGroupSize_ );
+ }
ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkShaderStatisticsInfoAMD*>(this) = rhs;
+ *this = rhs;
}
ShaderStatisticsInfoAMD& operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkShaderStatisticsInfoAMD*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>(&rhs);
return *this;
}
@@ -62034,57 +58161,37 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ShaderStageFlags shaderStageMask;
- vk::ShaderResourceUsageAMD resourceUsage;
- uint32_t numPhysicalVgprs;
- uint32_t numPhysicalSgprs;
- uint32_t numAvailableVgprs;
- uint32_t numAvailableSgprs;
- uint32_t computeWorkGroupSize[3];
+ VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
+ VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
+ uint32_t numPhysicalVgprs = {};
+ uint32_t numPhysicalSgprs = {};
+ uint32_t numAvailableVgprs = {};
+ uint32_t numAvailableSgprs = {};
+ uint32_t computeWorkGroupSize[3] = {};
};
static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
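  // [editor] usage sketch (not part of the generated header): fetching the AMD
  // statistics blob and viewing it through this wrapper; assumes the
  // VK_AMD_shader_info device extension is enabled and `pipeline` is a valid
  // vk::Pipeline containing a fragment stage.
  std::vector<uint8_t> blob = device.getShaderInfoAMD(
      pipeline, vk::ShaderStageFlagBits::eFragment, vk::ShaderInfoTypeAMD::eStatistics );
  auto const & stats = *reinterpret_cast<vk::ShaderStatisticsInfoAMD const *>( blob.data() );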
- namespace layout
+ struct SharedPresentSurfaceCapabilitiesKHR
{
- struct SharedPresentSurfaceCapabilitiesKHR
- {
- protected:
- SharedPresentSurfaceCapabilitiesKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>(this) = rhs;
- }
-
- SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
- void* pNext = nullptr;
- vk::ImageUsageFlags sharedPresentSupportedUsageFlags;
- };
- static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SharedPresentSurfaceCapabilitiesKHR : public layout::SharedPresentSurfaceCapabilitiesKHR
- {
- SharedPresentSurfaceCapabilitiesKHR() VULKAN_HPP_NOEXCEPT
- : layout::SharedPresentSurfaceCapabilitiesKHR()
+ SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
{}
+ VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) - offsetof( SharedPresentSurfaceCapabilitiesKHR, pNext ) );
+ return *this;
+ }
+
SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SharedPresentSurfaceCapabilitiesKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SharedPresentSurfaceCapabilitiesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>(&rhs);
return *this;
}
@@ -62110,25 +58217,32 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SharedPresentSurfaceCapabilitiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
};
static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
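
For the sType-bearing structs, the new assignment operator above copies with memcpy starting at pNext instead of assigning whole objects: sType becomes a const member fixed by its initializer, so only the tail of the object from pNext onward is copied. A condensed sketch of that offsetof arrangement, with a hypothetical Bar struct standing in for the generated wrappers:

#include <cstddef>
#include <cstdint>
#include <cstring>

enum class StructureType { eBar };

struct Bar
{
  Bar & operator=( Bar const & rhs ) noexcept
  {
    // sType is const and already correct; copy only the mutable tail.
    std::memcpy( &pNext, &rhs.pNext, sizeof( Bar ) - offsetof( Bar, pNext ) );
    return *this;
  }

  const StructureType sType = StructureType::eBar;  // can never be overwritten
  void *              pNext = nullptr;
  uint32_t            flags = 0;
};

Assigning b2 = b1 therefore copies pNext and flags while leaving b2.sType untouched, so the structure stays correctly tagged even when the source came straight from the C API.
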
struct SparseImageFormatProperties
{
- SparseImageFormatProperties() VULKAN_HPP_NOEXCEPT
+ SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+ VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {},
+ VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : aspectMask( aspectMask_ )
+ , imageGranularity( imageGranularity_ )
+ , flags( flags_ )
{}
SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageFormatProperties*>(this) = rhs;
+ *this = rhs;
}
SparseImageFormatProperties& operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageFormatProperties*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>(&rhs);
return *this;
}
@@ -62155,53 +58269,33 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::ImageAspectFlags aspectMask;
- vk::Extent3D imageGranularity;
- vk::SparseImageFormatFlags flags;
+ VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+ VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
+ VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
};
static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct SparseImageFormatProperties2
- {
- protected:
- SparseImageFormatProperties2() VULKAN_HPP_NOEXCEPT
- {}
-
- SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSparseImageFormatProperties2*>(this) = rhs;
- }
-
- SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSparseImageFormatProperties2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSparseImageFormatProperties2;
- void* pNext = nullptr;
- vk::SparseImageFormatProperties properties;
- };
- static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "layout struct and wrapper have different size!" );
- }
-
- struct SparseImageFormatProperties2 : public layout::SparseImageFormatProperties2
+ struct SparseImageFormatProperties2
{
- SparseImageFormatProperties2() VULKAN_HPP_NOEXCEPT
- : layout::SparseImageFormatProperties2()
+ SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
+ : properties( properties_ )
{}
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 & operator=( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) - offsetof( SparseImageFormatProperties2, pNext ) );
+ return *this;
+ }
+
SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SparseImageFormatProperties2( rhs )
- {}
+ {
+ *this = rhs;
+ }
SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SparseImageFormatProperties2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>(&rhs);
return *this;
}
@@ -62227,25 +58321,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SparseImageFormatProperties2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
};
static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
struct SparseImageMemoryRequirements
{
- SparseImageMemoryRequirements() VULKAN_HPP_NOEXCEPT
+ SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {},
+ uint32_t imageMipTailFirstLod_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {},
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT
+ : formatProperties( formatProperties_ )
+ , imageMipTailFirstLod( imageMipTailFirstLod_ )
+ , imageMipTailSize( imageMipTailSize_ )
+ , imageMipTailOffset( imageMipTailOffset_ )
+ , imageMipTailStride( imageMipTailStride_ )
{}
SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageMemoryRequirements*>(this) = rhs;
+ *this = rhs;
}
SparseImageMemoryRequirements& operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSparseImageMemoryRequirements*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>(&rhs);
return *this;
}
@@ -62274,55 +58379,35 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::SparseImageFormatProperties formatProperties;
- uint32_t imageMipTailFirstLod;
- vk::DeviceSize imageMipTailSize;
- vk::DeviceSize imageMipTailOffset;
- vk::DeviceSize imageMipTailStride;
+ VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
+ uint32_t imageMipTailFirstLod = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
+ VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
};
static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct SparseImageMemoryRequirements2
{
- struct SparseImageMemoryRequirements2
- {
- protected:
- SparseImageMemoryRequirements2() VULKAN_HPP_NOEXCEPT
- {}
-
- SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSparseImageMemoryRequirements2*>(this) = rhs;
- }
-
- SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSparseImageMemoryRequirements2*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
- void* pNext = nullptr;
- vk::SparseImageMemoryRequirements memoryRequirements;
- };
- static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "layout struct and wrapper have different size!" );
- }
-
- struct SparseImageMemoryRequirements2 : public layout::SparseImageMemoryRequirements2
- {
- SparseImageMemoryRequirements2() VULKAN_HPP_NOEXCEPT
- : layout::SparseImageMemoryRequirements2()
+ SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
+ : memoryRequirements( memoryRequirements_ )
{}
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 & operator=( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) - offsetof( SparseImageMemoryRequirements2, pNext ) );
+ return *this;
+ }
+
SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SparseImageMemoryRequirements2( rhs )
- {}
+ {
+ *this = rhs;
+ }
SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SparseImageMemoryRequirements2::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>(&rhs);
return *this;
}
@@ -62348,59 +58433,38 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SparseImageMemoryRequirements2::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
};
static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
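
Every hunk in this range performs the same structural collapse: the member-holding base in namespace layout, with its protected constructors, is folded into the public wrapper itself, and the derived-class constructor forwarding disappears with it. A condensed before/after using hypothetical Widget names:

// Before (condensation of the pattern being removed):
namespace layout_old
{
  struct Widget
  {
  protected:
    Widget() noexcept {}
  public:
    int width  = 0;
    int height = 0;
  };
}
struct WidgetOld : public layout_old::Widget
{
  WidgetOld() noexcept : layout_old::Widget() {}
};

// After (the flattened shape this diff converges on): one struct, public
// members with {} initializers, no base class to forward constructors to.
struct Widget
{
  Widget( int width_ = {}, int height_ = {} ) noexcept
    : width( width_ ), height( height_ ) {}

  int width  = {};
  int height = {};
};
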
#ifdef VK_USE_PLATFORM_GGP
- namespace layout
- {
- struct StreamDescriptorSurfaceCreateInfoGGP
- {
- protected:
- VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( vk::StreamDescriptorSurfaceCreateFlagsGGP flags_ = vk::StreamDescriptorSurfaceCreateFlagsGGP(),
- GgpStreamDescriptor streamDescriptor_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , streamDescriptor( streamDescriptor_ )
- {}
-
- StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>(this) = rhs;
- }
-
- StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
- const void* pNext = nullptr;
- vk::StreamDescriptorSurfaceCreateFlagsGGP flags;
- GgpStreamDescriptor streamDescriptor;
- };
- static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "layout struct and wrapper have different size!" );
- }
-
- struct StreamDescriptorSurfaceCreateInfoGGP : public layout::StreamDescriptorSurfaceCreateInfoGGP
+ struct StreamDescriptorSurfaceCreateInfoGGP
{
- VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( vk::StreamDescriptorSurfaceCreateFlagsGGP flags_ = vk::StreamDescriptorSurfaceCreateFlagsGGP(),
- GgpStreamDescriptor streamDescriptor_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::StreamDescriptorSurfaceCreateInfoGGP( flags_, streamDescriptor_ )
+ VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {},
+ GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , streamDescriptor( streamDescriptor_ )
{}
+ VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & operator=( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) - offsetof( StreamDescriptorSurfaceCreateInfoGGP, pNext ) );
+ return *this;
+ }
+
StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::StreamDescriptorSurfaceCreateInfoGGP( rhs )
- {}
+ {
+ *this = rhs;
+ }
StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::StreamDescriptorSurfaceCreateInfoGGP::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>(&rhs);
return *this;
}
@@ -62410,7 +58474,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- StreamDescriptorSurfaceCreateInfoGGP & setFlags( vk::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
+ StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -62445,78 +58509,48 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::StreamDescriptorSurfaceCreateInfoGGP::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
+ GgpStreamDescriptor streamDescriptor = {};
};
static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_GGP*/
- namespace layout
+ struct SubmitInfo
{
- struct SubmitInfo
- {
- protected:
- VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = 0,
- const vk::Semaphore* pWaitSemaphores_ = nullptr,
- const vk::PipelineStageFlags* pWaitDstStageMask_ = nullptr,
- uint32_t commandBufferCount_ = 0,
- const vk::CommandBuffer* pCommandBuffers_ = nullptr,
- uint32_t signalSemaphoreCount_ = 0,
- const vk::Semaphore* pSignalSemaphores_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreCount( waitSemaphoreCount_ )
- , pWaitSemaphores( pWaitSemaphores_ )
- , pWaitDstStageMask( pWaitDstStageMask_ )
- , commandBufferCount( commandBufferCount_ )
- , pCommandBuffers( pCommandBuffers_ )
- , signalSemaphoreCount( signalSemaphoreCount_ )
- , pSignalSemaphores( pSignalSemaphores_ )
- {}
-
- SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubmitInfo*>(this) = rhs;
- }
-
- SubmitInfo& operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubmitInfo*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSubmitInfo;
- const void* pNext = nullptr;
- uint32_t waitSemaphoreCount;
- const vk::Semaphore* pWaitSemaphores;
- const vk::PipelineStageFlags* pWaitDstStageMask;
- uint32_t commandBufferCount;
- const vk::CommandBuffer* pCommandBuffers;
- uint32_t signalSemaphoreCount;
- const vk::Semaphore* pSignalSemaphores;
- };
- static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "layout struct and wrapper have different size!" );
- }
-
- struct SubmitInfo : public layout::SubmitInfo
- {
- VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = 0,
- const vk::Semaphore* pWaitSemaphores_ = nullptr,
- const vk::PipelineStageFlags* pWaitDstStageMask_ = nullptr,
- uint32_t commandBufferCount_ = 0,
- const vk::CommandBuffer* pCommandBuffers_ = nullptr,
- uint32_t signalSemaphoreCount_ = 0,
- const vk::Semaphore* pSignalSemaphores_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::SubmitInfo( waitSemaphoreCount_, pWaitSemaphores_, pWaitDstStageMask_, commandBufferCount_, pCommandBuffers_, signalSemaphoreCount_, pSignalSemaphores_ )
+ VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
+ const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {},
+ uint32_t commandBufferCount_ = {},
+ const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {},
+ uint32_t signalSemaphoreCount_ = {},
+ const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreCount( waitSemaphoreCount_ )
+ , pWaitSemaphores( pWaitSemaphores_ )
+ , pWaitDstStageMask( pWaitDstStageMask_ )
+ , commandBufferCount( commandBufferCount_ )
+ , pCommandBuffers( pCommandBuffers_ )
+ , signalSemaphoreCount( signalSemaphoreCount_ )
+ , pSignalSemaphores( pSignalSemaphores_ )
{}
+ VULKAN_HPP_NAMESPACE::SubmitInfo & operator=( VULKAN_HPP_NAMESPACE::SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) - offsetof( SubmitInfo, pNext ) );
+ return *this;
+ }
+
SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SubmitInfo( rhs )
- {}
+ {
+ *this = rhs;
+ }
SubmitInfo& operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SubmitInfo::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>(&rhs);
return *this;
}
@@ -62532,13 +58566,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SubmitInfo & setPWaitSemaphores( const vk::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphores = pWaitSemaphores_;
return *this;
}
- SubmitInfo & setPWaitDstStageMask( const vk::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
{
pWaitDstStageMask = pWaitDstStageMask_;
return *this;
@@ -62550,7 +58584,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SubmitInfo & setPCommandBuffers( const vk::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
{
pCommandBuffers = pCommandBuffers_;
return *this;
@@ -62562,7 +58596,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SubmitInfo & setPSignalSemaphores( const vk::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+ SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphores = pSignalSemaphores_;
return *this;
@@ -62596,327 +58630,266 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SubmitInfo::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
+ const void* pNext = {};
+ uint32_t waitSemaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+ const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {};
+ uint32_t commandBufferCount = {};
+ const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {};
+ uint32_t signalSemaphoreCount = {};
+ const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
};
static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" );
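
SubmitInfo keeps its chainable setters through the rewrite; each returns *this, so a submission can still be assembled fluently. A short usage sketch, assuming a build where vulkan.hpp is on the include path, vk is the default namespace alias, and the handles passed in were created elsewhere:

#include <vulkan/vulkan.hpp>

void recordSubmission( vk::Queue queue, vk::CommandBuffer cmd,
                       vk::Semaphore waitSem, vk::Semaphore signalSem, vk::Fence fence )
{
  vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;

  // Each setter returns *this, so the whole description chains.
  vk::SubmitInfo submitInfo = vk::SubmitInfo()
      .setWaitSemaphoreCount( 1 )
      .setPWaitSemaphores( &waitSem )
      .setPWaitDstStageMask( &waitStage )
      .setCommandBufferCount( 1 )
      .setPCommandBuffers( &cmd )
      .setSignalSemaphoreCount( 1 )
      .setPSignalSemaphores( &signalSem );

  queue.submit( 1, &submitInfo, fence );  // fence may be a null vk::Fence()
}
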
- namespace layout
+ struct SubpassBeginInfo
{
- struct SubpassBeginInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SubpassBeginInfoKHR( vk::SubpassContents contents_ = vk::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
- : contents( contents_ )
- {}
-
- SubpassBeginInfoKHR( VkSubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassBeginInfoKHR*>(this) = rhs;
- }
-
- SubpassBeginInfoKHR& operator=( VkSubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassBeginInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSubpassBeginInfoKHR;
- const void* pNext = nullptr;
- vk::SubpassContents contents;
- };
- static_assert( sizeof( SubpassBeginInfoKHR ) == sizeof( VkSubpassBeginInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SubpassBeginInfoKHR : public layout::SubpassBeginInfoKHR
- {
- VULKAN_HPP_CONSTEXPR SubpassBeginInfoKHR( vk::SubpassContents contents_ = vk::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassBeginInfoKHR( contents_ )
+ VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
+ : contents( contents_ )
{}
- SubpassBeginInfoKHR( VkSubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassBeginInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::SubpassBeginInfo & operator=( VULKAN_HPP_NAMESPACE::SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) - offsetof( SubpassBeginInfo, pNext ) );
+ return *this;
+ }
+
+ SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- SubpassBeginInfoKHR& operator=( VkSubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo& operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SubpassBeginInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>(&rhs);
return *this;
}
- SubpassBeginInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- SubpassBeginInfoKHR & setContents( vk::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
+ SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
{
contents = contents_;
return *this;
}
- operator VkSubpassBeginInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSubpassBeginInfoKHR*>( this );
+ return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
}
- operator VkSubpassBeginInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSubpassBeginInfoKHR*>( this );
+ return *reinterpret_cast<VkSubpassBeginInfo*>( this );
}
- bool operator==( SubpassBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( contents == rhs.contents );
}
- bool operator!=( SubpassBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SubpassBeginInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
};
- static_assert( sizeof( SubpassBeginInfoKHR ) == sizeof( VkSubpassBeginInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassBeginInfoKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct SubpassDescriptionDepthStencilResolveKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolveKHR( vk::ResolveModeFlagBitsKHR depthResolveMode_ = vk::ResolveModeFlagBitsKHR::eNone,
- vk::ResolveModeFlagBitsKHR stencilResolveMode_ = vk::ResolveModeFlagBitsKHR::eNone,
- const vk::AttachmentReference2KHR* pDepthStencilResolveAttachment_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : depthResolveMode( depthResolveMode_ )
- , stencilResolveMode( stencilResolveMode_ )
- , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
- {}
-
- SubpassDescriptionDepthStencilResolveKHR( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassDescriptionDepthStencilResolveKHR*>(this) = rhs;
- }
+ static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
- SubpassDescriptionDepthStencilResolveKHR& operator=( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassDescriptionDepthStencilResolveKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolveKHR;
- const void* pNext = nullptr;
- vk::ResolveModeFlagBitsKHR depthResolveMode;
- vk::ResolveModeFlagBitsKHR stencilResolveMode;
- const vk::AttachmentReference2KHR* pDepthStencilResolveAttachment;
- };
- static_assert( sizeof( SubpassDescriptionDepthStencilResolveKHR ) == sizeof( VkSubpassDescriptionDepthStencilResolveKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SubpassDescriptionDepthStencilResolveKHR : public layout::SubpassDescriptionDepthStencilResolveKHR
+ struct SubpassDescriptionDepthStencilResolve
{
- VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolveKHR( vk::ResolveModeFlagBitsKHR depthResolveMode_ = vk::ResolveModeFlagBitsKHR::eNone,
- vk::ResolveModeFlagBitsKHR stencilResolveMode_ = vk::ResolveModeFlagBitsKHR::eNone,
- const vk::AttachmentReference2KHR* pDepthStencilResolveAttachment_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassDescriptionDepthStencilResolveKHR( depthResolveMode_, stencilResolveMode_, pDepthStencilResolveAttachment_ )
+ VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
+ VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
+ : depthResolveMode( depthResolveMode_ )
+ , stencilResolveMode( stencilResolveMode_ )
+ , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
{}
- SubpassDescriptionDepthStencilResolveKHR( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassDescriptionDepthStencilResolveKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve & operator=( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) - offsetof( SubpassDescriptionDepthStencilResolve, pNext ) );
+ return *this;
+ }
- SubpassDescriptionDepthStencilResolveKHR& operator=( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SubpassDescriptionDepthStencilResolveKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ SubpassDescriptionDepthStencilResolve& operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>(&rhs);
return *this;
}
- SubpassDescriptionDepthStencilResolveKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- SubpassDescriptionDepthStencilResolveKHR & setDepthResolveMode( vk::ResolveModeFlagBitsKHR depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
{
depthResolveMode = depthResolveMode_;
return *this;
}
- SubpassDescriptionDepthStencilResolveKHR & setStencilResolveMode( vk::ResolveModeFlagBitsKHR stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
{
stencilResolveMode = stencilResolveMode_;
return *this;
}
- SubpassDescriptionDepthStencilResolveKHR & setPDepthStencilResolveAttachment( const vk::AttachmentReference2KHR* pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
+ SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
{
pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
return *this;
}
- operator VkSubpassDescriptionDepthStencilResolveKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescriptionDepthStencilResolve const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolveKHR*>( this );
+ return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
}
- operator VkSubpassDescriptionDepthStencilResolveKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolveKHR*>( this );
+ return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
}
- bool operator==( SubpassDescriptionDepthStencilResolveKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
- && vk::operator==( depthResolveMode, rhs.depthResolveMode )
- && vk::operator==( stencilResolveMode, rhs.stencilResolveMode )
+ && ( depthResolveMode == rhs.depthResolveMode )
+ && ( stencilResolveMode == rhs.stencilResolveMode )
&& ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
}
- bool operator!=( SubpassDescriptionDepthStencilResolveKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SubpassDescriptionDepthStencilResolveKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+ VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
+ const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment = {};
};
- static_assert( sizeof( SubpassDescriptionDepthStencilResolveKHR ) == sizeof( VkSubpassDescriptionDepthStencilResolveKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolveKHR>::value, "struct wrapper is not a standard layout!" );
-
- namespace layout
- {
- struct SubpassEndInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SubpassEndInfoKHR() VULKAN_HPP_NOEXCEPT
- {}
-
- SubpassEndInfoKHR( VkSubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassEndInfoKHR*>(this) = rhs;
- }
-
- SubpassEndInfoKHR& operator=( VkSubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSubpassEndInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSubpassEndInfoKHR;
- const void* pNext = nullptr;
- };
- static_assert( sizeof( SubpassEndInfoKHR ) == sizeof( VkSubpassEndInfoKHR ), "layout struct and wrapper have different size!" );
- }
+ static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolve>::value, "struct wrapper is not a standard layout!" );
- struct SubpassEndInfoKHR : public layout::SubpassEndInfoKHR
+ struct SubpassEndInfo
{
- VULKAN_HPP_CONSTEXPR SubpassEndInfoKHR() VULKAN_HPP_NOEXCEPT
- : layout::SubpassEndInfoKHR()
+ VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
{}
- SubpassEndInfoKHR( VkSubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SubpassEndInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::SubpassEndInfo & operator=( VULKAN_HPP_NAMESPACE::SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) - offsetof( SubpassEndInfo, pNext ) );
+ return *this;
+ }
+
+ SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = rhs;
+ }
- SubpassEndInfoKHR& operator=( VkSubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ SubpassEndInfo& operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SubpassEndInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>(&rhs);
return *this;
}
- SubpassEndInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- operator VkSubpassEndInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkSubpassEndInfoKHR*>( this );
+ return *reinterpret_cast<const VkSubpassEndInfo*>( this );
}
- operator VkSubpassEndInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkSubpassEndInfoKHR*>( this );
+ return *reinterpret_cast<VkSubpassEndInfo*>( this );
}
- bool operator==( SubpassEndInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext );
}
- bool operator!=( SubpassEndInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::SubpassEndInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
+ const void* pNext = {};
};
- static_assert( sizeof( SubpassEndInfoKHR ) == sizeof( VkSubpassEndInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<SubpassEndInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
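
The Subpass* structs in this range also lose their KHR suffix (SubpassBeginInfoKHR becomes SubpassBeginInfo, and likewise for SubpassEndInfo and SubpassDescriptionDepthStencilResolve), matching the promotion of VK_KHR_create_renderpass2 into core Vulkan 1.2. The suffixed names are normally kept as aliases so existing callers still compile; the alias lines sit outside this hunk, so the following is an assumption about the surrounding header rather than part of the diff:

struct SubpassBeginInfo { /* promoted core struct, as rewritten above */ };
struct SubpassEndInfo   { /* likewise */ };

// Assumed promoted-extension aliasing, following vulkan.hpp's usual pattern:
using SubpassBeginInfoKHR = SubpassBeginInfo;
using SubpassEndInfoKHR   = SubpassEndInfo;
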
- namespace layout
+ struct SurfaceCapabilities2EXT
{
- struct SurfaceCapabilities2EXT
- {
- protected:
- SurfaceCapabilities2EXT() VULKAN_HPP_NOEXCEPT
- {}
-
- SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceCapabilities2EXT*>(this) = rhs;
- }
-
- SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceCapabilities2EXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
- void* pNext = nullptr;
- uint32_t minImageCount;
- uint32_t maxImageCount;
- vk::Extent2D currentExtent;
- vk::Extent2D minImageExtent;
- vk::Extent2D maxImageExtent;
- uint32_t maxImageArrayLayers;
- vk::SurfaceTransformFlagsKHR supportedTransforms;
- vk::SurfaceTransformFlagBitsKHR currentTransform;
- vk::CompositeAlphaFlagsKHR supportedCompositeAlpha;
- vk::ImageUsageFlags supportedUsageFlags;
- vk::SurfaceCounterFlagsEXT supportedSurfaceCounters;
- };
- static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "layout struct and wrapper have different size!" );
- }
-
- struct SurfaceCapabilities2EXT : public layout::SurfaceCapabilities2EXT
- {
- SurfaceCapabilities2EXT() VULKAN_HPP_NOEXCEPT
- : layout::SurfaceCapabilities2EXT()
+ SurfaceCapabilities2EXT( uint32_t minImageCount_ = {},
+ uint32_t maxImageCount_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
+ uint32_t maxImageArrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
+ : minImageCount( minImageCount_ )
+ , maxImageCount( maxImageCount_ )
+ , currentExtent( currentExtent_ )
+ , minImageExtent( minImageExtent_ )
+ , maxImageExtent( maxImageExtent_ )
+ , maxImageArrayLayers( maxImageArrayLayers_ )
+ , supportedTransforms( supportedTransforms_ )
+ , currentTransform( currentTransform_ )
+ , supportedCompositeAlpha( supportedCompositeAlpha_ )
+ , supportedUsageFlags( supportedUsageFlags_ )
+ , supportedSurfaceCounters( supportedSurfaceCounters_ )
{}
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) - offsetof( SurfaceCapabilities2EXT, pNext ) );
+ return *this;
+ }
+
SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceCapabilities2EXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SurfaceCapabilities2EXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>(&rhs);
return *this;
}
@@ -62941,7 +58914,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( maxImageExtent == rhs.maxImageExtent )
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
&& ( supportedTransforms == rhs.supportedTransforms )
- && vk::operator==( currentTransform, rhs.currentTransform )
+ && ( currentTransform == rhs.currentTransform )
&& ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
&& ( supportedUsageFlags == rhs.supportedUsageFlags )
&& ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
@@ -62952,25 +58925,56 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SurfaceCapabilities2EXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+ void* pNext = {};
+ uint32_t minImageCount = {};
+ uint32_t maxImageCount = {};
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+ uint32_t maxImageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
};
static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
struct SurfaceCapabilitiesKHR
{
- SurfaceCapabilitiesKHR() VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {},
+ uint32_t maxImageCount_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
+ uint32_t maxImageArrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+ : minImageCount( minImageCount_ )
+ , maxImageCount( maxImageCount_ )
+ , currentExtent( currentExtent_ )
+ , minImageExtent( minImageExtent_ )
+ , maxImageExtent( maxImageExtent_ )
+ , maxImageArrayLayers( maxImageArrayLayers_ )
+ , supportedTransforms( supportedTransforms_ )
+ , currentTransform( currentTransform_ )
+ , supportedCompositeAlpha( supportedCompositeAlpha_ )
+ , supportedUsageFlags( supportedUsageFlags_ )
{}
SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSurfaceCapabilitiesKHR*>(this) = rhs;
+ *this = rhs;
}
SurfaceCapabilitiesKHR& operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSurfaceCapabilitiesKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>(&rhs);
return *this;
}
@@ -62993,7 +58997,7 @@ namespace VULKAN_HPP_NAMESPACE
&& ( maxImageExtent == rhs.maxImageExtent )
&& ( maxImageArrayLayers == rhs.maxImageArrayLayers )
&& ( supportedTransforms == rhs.supportedTransforms )
- && vk::operator==( currentTransform, rhs.currentTransform )
+ && ( currentTransform == rhs.currentTransform )
&& ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
&& ( supportedUsageFlags == rhs.supportedUsageFlags );
}
@@ -63004,60 +59008,40 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- uint32_t minImageCount;
- uint32_t maxImageCount;
- vk::Extent2D currentExtent;
- vk::Extent2D minImageExtent;
- vk::Extent2D maxImageExtent;
- uint32_t maxImageArrayLayers;
- vk::SurfaceTransformFlagsKHR supportedTransforms;
- vk::SurfaceTransformFlagBitsKHR currentTransform;
- vk::CompositeAlphaFlagsKHR supportedCompositeAlpha;
- vk::ImageUsageFlags supportedUsageFlags;
+ uint32_t minImageCount = {};
+ uint32_t maxImageCount = {};
+ VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+ VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+ uint32_t maxImageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
};
static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
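
Another repeated simplification: comparisons of single FlagBits members such as currentTransform drop the explicitly qualified vk::operator== in favor of a plain ==. Scoped enums compare directly, so no helper call is needed; a trivial sketch with a stand-in enum:

#include <cstdint>

enum class SurfaceTransformFlagBits : uint32_t { eIdentity = 0x1, eRotate90 = 0x2 };

bool sameTransform( SurfaceTransformFlagBits a, SurfaceTransformFlagBits b )
{
  return a == b;  // scoped enum values compare directly with ==
}
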
- namespace layout
+ struct SurfaceCapabilities2KHR
{
- struct SurfaceCapabilities2KHR
- {
- protected:
- SurfaceCapabilities2KHR() VULKAN_HPP_NOEXCEPT
- {}
-
- SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceCapabilities2KHR*>(this) = rhs;
- }
-
- SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceCapabilities2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
- void* pNext = nullptr;
- vk::SurfaceCapabilitiesKHR surfaceCapabilities;
- };
- static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SurfaceCapabilities2KHR : public layout::SurfaceCapabilities2KHR
- {
- SurfaceCapabilities2KHR() VULKAN_HPP_NOEXCEPT
- : layout::SurfaceCapabilities2KHR()
+ SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT
+ : surfaceCapabilities( surfaceCapabilities_ )
{}
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) - offsetof( SurfaceCapabilities2KHR, pNext ) );
+ return *this;
+ }
+
SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceCapabilities2KHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SurfaceCapabilities2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>(&rhs);
return *this;
}
@@ -63083,55 +59067,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SurfaceCapabilities2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
};
static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
+ struct SurfaceCapabilitiesFullScreenExclusiveEXT
{
- struct SurfaceCapabilitiesFullScreenExclusiveEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( vk::Bool32 fullScreenExclusiveSupported_ = 0 ) VULKAN_HPP_NOEXCEPT
- : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
- {}
-
- SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>(this) = rhs;
- }
-
- SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
- void* pNext = nullptr;
- vk::Bool32 fullScreenExclusiveSupported;
- };
- static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct SurfaceCapabilitiesFullScreenExclusiveEXT : public layout::SurfaceCapabilitiesFullScreenExclusiveEXT
- {
- VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( vk::Bool32 fullScreenExclusiveSupported_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceCapabilitiesFullScreenExclusiveEXT( fullScreenExclusiveSupported_ )
+ VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT
+ : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
{}
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) - offsetof( SurfaceCapabilitiesFullScreenExclusiveEXT, pNext ) );
+ return *this;
+ }
+
SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceCapabilitiesFullScreenExclusiveEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SurfaceCapabilitiesFullScreenExclusiveEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>(&rhs);
return *this;
}
@@ -63141,7 +59106,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( vk::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
+ SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
{
fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
return *this;
@@ -63169,8 +59134,10 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SurfaceCapabilitiesFullScreenExclusiveEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
};
static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
@@ -63178,17 +59145,20 @@ namespace VULKAN_HPP_NAMESPACE
struct SurfaceFormatKHR
{
- SurfaceFormatKHR() VULKAN_HPP_NOEXCEPT
+ SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
+ : format( format_ )
+ , colorSpace( colorSpace_ )
{}
SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSurfaceFormatKHR*>(this) = rhs;
+ *this = rhs;
}
SurfaceFormatKHR& operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- *reinterpret_cast<VkSurfaceFormatKHR*>(this) = rhs;
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>(&rhs);
return *this;
}
@@ -63214,52 +59184,32 @@ namespace VULKAN_HPP_NAMESPACE
}
public:
- vk::Format format;
- vk::ColorSpaceKHR colorSpace;
+ VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
};
static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct SurfaceFormat2KHR
- {
- protected:
- SurfaceFormat2KHR() VULKAN_HPP_NOEXCEPT
- {}
-
- SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceFormat2KHR*>(this) = rhs;
- }
-
- SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceFormat2KHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSurfaceFormat2KHR;
- void* pNext = nullptr;
- vk::SurfaceFormatKHR surfaceFormat;
- };
- static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SurfaceFormat2KHR : public layout::SurfaceFormat2KHR
+ struct SurfaceFormat2KHR
{
- SurfaceFormat2KHR() VULKAN_HPP_NOEXCEPT
- : layout::SurfaceFormat2KHR()
+ SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
+ : surfaceFormat( surfaceFormat_ )
{}
+ VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) - offsetof( SurfaceFormat2KHR, pNext ) );
+ return *this;
+ }
+
SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceFormat2KHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SurfaceFormat2KHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>(&rhs);
return *this;
}
@@ -63285,55 +59235,36 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SurfaceFormat2KHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
};
static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
+ struct SurfaceFullScreenExclusiveInfoEXT
{
- struct SurfaceFullScreenExclusiveInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( vk::FullScreenExclusiveEXT fullScreenExclusive_ = vk::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT
- : fullScreenExclusive( fullScreenExclusive_ )
- {}
-
- SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>(this) = rhs;
- }
-
- SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
- void* pNext = nullptr;
- vk::FullScreenExclusiveEXT fullScreenExclusive;
- };
- static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct SurfaceFullScreenExclusiveInfoEXT : public layout::SurfaceFullScreenExclusiveInfoEXT
- {
- VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( vk::FullScreenExclusiveEXT fullScreenExclusive_ = vk::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceFullScreenExclusiveInfoEXT( fullScreenExclusive_ )
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT
+ : fullScreenExclusive( fullScreenExclusive_ )
{}
+ VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) - offsetof( SurfaceFullScreenExclusiveInfoEXT, pNext ) );
+ return *this;
+ }
+
SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceFullScreenExclusiveInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SurfaceFullScreenExclusiveInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>(&rhs);
return *this;
}
@@ -63343,7 +59274,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( vk::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
+ SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
{
fullScreenExclusive = fullScreenExclusive_;
return *this;
@@ -63371,8 +59302,10 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SurfaceFullScreenExclusiveInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
};
static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
@@ -63380,47 +59313,26 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct SurfaceFullScreenExclusiveWin32InfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = 0 ) VULKAN_HPP_NOEXCEPT
- : hmonitor( hmonitor_ )
- {}
-
- SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>(this) = rhs;
- }
-
- SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
- const void* pNext = nullptr;
- HMONITOR hmonitor;
- };
- static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct SurfaceFullScreenExclusiveWin32InfoEXT : public layout::SurfaceFullScreenExclusiveWin32InfoEXT
+ struct SurfaceFullScreenExclusiveWin32InfoEXT
{
- VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceFullScreenExclusiveWin32InfoEXT( hmonitor_ )
+ VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT
+ : hmonitor( hmonitor_ )
{}
+ VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) - offsetof( SurfaceFullScreenExclusiveWin32InfoEXT, pNext ) );
+ return *this;
+ }
+
SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceFullScreenExclusiveWin32InfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SurfaceFullScreenExclusiveWin32InfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>(&rhs);
return *this;
}
@@ -63458,54 +59370,35 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SurfaceFullScreenExclusiveWin32InfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+ const void* pNext = {};
+ HMONITOR hmonitor = {};
};
static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
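Both full-screen-exclusive structs are meant to be chained ahead of swapchain creation; a hedged sketch, assuming a valid HMONITOR and the generated setHmonitor/setPNext setters that this hunk does not show:

#include <vulkan/vulkan.hpp>  // with VK_USE_PLATFORM_WIN32_KHR defined

// Request app-controlled full-screen exclusive mode on a specific monitor by
// chaining EXT struct -> Win32 struct off SwapchainCreateInfoKHR::pNext.
void chainFullScreenExclusive( vk::SwapchainCreateInfoKHR & swapchainInfo,
                               vk::SurfaceFullScreenExclusiveInfoEXT & fullScreenInfo,
                               vk::SurfaceFullScreenExclusiveWin32InfoEXT & win32Info,
                               HMONITOR monitor )
{
  win32Info.setHmonitor( monitor );
  fullScreenInfo.setFullScreenExclusive( vk::FullScreenExclusiveEXT::eApplicationControlled );
  fullScreenInfo.setPNext( &win32Info );
  swapchainInfo.setPNext( &fullScreenInfo );
}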
- namespace layout
+ struct SurfaceProtectedCapabilitiesKHR
{
- struct SurfaceProtectedCapabilitiesKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( vk::Bool32 supportsProtected_ = 0 ) VULKAN_HPP_NOEXCEPT
- : supportsProtected( supportsProtected_ )
- {}
-
- SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>(this) = rhs;
- }
-
- SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
- const void* pNext = nullptr;
- vk::Bool32 supportsProtected;
- };
- static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SurfaceProtectedCapabilitiesKHR : public layout::SurfaceProtectedCapabilitiesKHR
- {
- VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( vk::Bool32 supportsProtected_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceProtectedCapabilitiesKHR( supportsProtected_ )
+ VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT
+ : supportsProtected( supportsProtected_ )
{}
+ VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR & operator=( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) - offsetof( SurfaceProtectedCapabilitiesKHR, pNext ) );
+ return *this;
+ }
+
SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SurfaceProtectedCapabilitiesKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SurfaceProtectedCapabilitiesKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>(&rhs);
return *this;
}
@@ -63515,7 +59408,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SurfaceProtectedCapabilitiesKHR & setSupportsProtected( vk::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
+ SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
{
supportsProtected = supportsProtected_;
return *this;
@@ -63543,53 +59436,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SurfaceProtectedCapabilitiesKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
};
static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
- namespace layout
- {
- struct SwapchainCounterCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( vk::SurfaceCounterFlagsEXT surfaceCounters_ = vk::SurfaceCounterFlagsEXT() ) VULKAN_HPP_NOEXCEPT
- : surfaceCounters( surfaceCounters_ )
- {}
-
- SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>(this) = rhs;
- }
-
- SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
- const void* pNext = nullptr;
- vk::SurfaceCounterFlagsEXT surfaceCounters;
- };
- static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct SwapchainCounterCreateInfoEXT : public layout::SwapchainCounterCreateInfoEXT
+ struct SwapchainCounterCreateInfoEXT
{
- VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( vk::SurfaceCounterFlagsEXT surfaceCounters_ = vk::SurfaceCounterFlagsEXT() ) VULKAN_HPP_NOEXCEPT
- : layout::SwapchainCounterCreateInfoEXT( surfaceCounters_ )
+ VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
+ : surfaceCounters( surfaceCounters_ )
{}
+ VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) - offsetof( SwapchainCounterCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SwapchainCounterCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SwapchainCounterCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -63599,7 +59473,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SwapchainCounterCreateInfoEXT & setSurfaceCounters( vk::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
{
surfaceCounters = surfaceCounters_;
return *this;
@@ -63627,113 +59501,64 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SwapchainCounterCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
};
static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
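SwapchainCounterCreateInfoEXT comes from VK_EXT_display_control; a hedged usage sketch requesting a vblank counter (the eVblank flag name is assumed from the extension's VK_SURFACE_COUNTER_VBLANK_BIT_EXT):

#include <vulkan/vulkan.hpp>

// Chain a vblank-counter request ahead of vkCreateSwapchainKHR; the counter
// value is read back later through the extension's counter-query entry point.
void chainVblankCounter( vk::SwapchainCreateInfoKHR & swapchainInfo,
                         vk::SwapchainCounterCreateInfoEXT & counterInfo )
{
  counterInfo.setSurfaceCounters( vk::SurfaceCounterFlagBitsEXT::eVblank );
  swapchainInfo.setPNext( &counterInfo );
}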
- namespace layout
- {
- struct SwapchainCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( vk::SwapchainCreateFlagsKHR flags_ = vk::SwapchainCreateFlagsKHR(),
- vk::SurfaceKHR surface_ = vk::SurfaceKHR(),
- uint32_t minImageCount_ = 0,
- vk::Format imageFormat_ = vk::Format::eUndefined,
- vk::ColorSpaceKHR imageColorSpace_ = vk::ColorSpaceKHR::eSrgbNonlinear,
- vk::Extent2D imageExtent_ = vk::Extent2D(),
- uint32_t imageArrayLayers_ = 0,
- vk::ImageUsageFlags imageUsage_ = vk::ImageUsageFlags(),
- vk::SharingMode imageSharingMode_ = vk::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = 0,
- const uint32_t* pQueueFamilyIndices_ = nullptr,
- vk::SurfaceTransformFlagBitsKHR preTransform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity,
- vk::CompositeAlphaFlagBitsKHR compositeAlpha_ = vk::CompositeAlphaFlagBitsKHR::eOpaque,
- vk::PresentModeKHR presentMode_ = vk::PresentModeKHR::eImmediate,
- vk::Bool32 clipped_ = 0,
- vk::SwapchainKHR oldSwapchain_ = vk::SwapchainKHR() ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , surface( surface_ )
- , minImageCount( minImageCount_ )
- , imageFormat( imageFormat_ )
- , imageColorSpace( imageColorSpace_ )
- , imageExtent( imageExtent_ )
- , imageArrayLayers( imageArrayLayers_ )
- , imageUsage( imageUsage_ )
- , imageSharingMode( imageSharingMode_ )
- , queueFamilyIndexCount( queueFamilyIndexCount_ )
- , pQueueFamilyIndices( pQueueFamilyIndices_ )
- , preTransform( preTransform_ )
- , compositeAlpha( compositeAlpha_ )
- , presentMode( presentMode_ )
- , clipped( clipped_ )
- , oldSwapchain( oldSwapchain_ )
- {}
-
- SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSwapchainCreateInfoKHR*>(this) = rhs;
- }
-
- SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSwapchainCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
- const void* pNext = nullptr;
- vk::SwapchainCreateFlagsKHR flags;
- vk::SurfaceKHR surface;
- uint32_t minImageCount;
- vk::Format imageFormat;
- vk::ColorSpaceKHR imageColorSpace;
- vk::Extent2D imageExtent;
- uint32_t imageArrayLayers;
- vk::ImageUsageFlags imageUsage;
- vk::SharingMode imageSharingMode;
- uint32_t queueFamilyIndexCount;
- const uint32_t* pQueueFamilyIndices;
- vk::SurfaceTransformFlagBitsKHR preTransform;
- vk::CompositeAlphaFlagBitsKHR compositeAlpha;
- vk::PresentModeKHR presentMode;
- vk::Bool32 clipped;
- vk::SwapchainKHR oldSwapchain;
- };
- static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct SwapchainCreateInfoKHR : public layout::SwapchainCreateInfoKHR
- {
- VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( vk::SwapchainCreateFlagsKHR flags_ = vk::SwapchainCreateFlagsKHR(),
- vk::SurfaceKHR surface_ = vk::SurfaceKHR(),
- uint32_t minImageCount_ = 0,
- vk::Format imageFormat_ = vk::Format::eUndefined,
- vk::ColorSpaceKHR imageColorSpace_ = vk::ColorSpaceKHR::eSrgbNonlinear,
- vk::Extent2D imageExtent_ = vk::Extent2D(),
- uint32_t imageArrayLayers_ = 0,
- vk::ImageUsageFlags imageUsage_ = vk::ImageUsageFlags(),
- vk::SharingMode imageSharingMode_ = vk::SharingMode::eExclusive,
- uint32_t queueFamilyIndexCount_ = 0,
- const uint32_t* pQueueFamilyIndices_ = nullptr,
- vk::SurfaceTransformFlagBitsKHR preTransform_ = vk::SurfaceTransformFlagBitsKHR::eIdentity,
- vk::CompositeAlphaFlagBitsKHR compositeAlpha_ = vk::CompositeAlphaFlagBitsKHR::eOpaque,
- vk::PresentModeKHR presentMode_ = vk::PresentModeKHR::eImmediate,
- vk::Bool32 clipped_ = 0,
- vk::SwapchainKHR oldSwapchain_ = vk::SwapchainKHR() ) VULKAN_HPP_NOEXCEPT
- : layout::SwapchainCreateInfoKHR( flags_, surface_, minImageCount_, imageFormat_, imageColorSpace_, imageExtent_, imageArrayLayers_, imageUsage_, imageSharingMode_, queueFamilyIndexCount_, pQueueFamilyIndices_, preTransform_, compositeAlpha_, presentMode_, clipped_, oldSwapchain_ )
+ struct SwapchainCreateInfoKHR
+ {
+ VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {},
+ uint32_t minImageCount_ = {},
+ VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
+ uint32_t imageArrayLayers_ = {},
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
+ VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+ uint32_t queueFamilyIndexCount_ = {},
+ const uint32_t* pQueueFamilyIndices_ = {},
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
+ VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
+ VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
+ VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , surface( surface_ )
+ , minImageCount( minImageCount_ )
+ , imageFormat( imageFormat_ )
+ , imageColorSpace( imageColorSpace_ )
+ , imageExtent( imageExtent_ )
+ , imageArrayLayers( imageArrayLayers_ )
+ , imageUsage( imageUsage_ )
+ , imageSharingMode( imageSharingMode_ )
+ , queueFamilyIndexCount( queueFamilyIndexCount_ )
+ , pQueueFamilyIndices( pQueueFamilyIndices_ )
+ , preTransform( preTransform_ )
+ , compositeAlpha( compositeAlpha_ )
+ , presentMode( presentMode_ )
+ , clipped( clipped_ )
+ , oldSwapchain( oldSwapchain_ )
{}
+ VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) - offsetof( SwapchainCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SwapchainCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SwapchainCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -63743,13 +59568,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SwapchainCreateInfoKHR & setFlags( vk::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
- SwapchainCreateInfoKHR & setSurface( vk::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
{
surface = surface_;
return *this;
@@ -63761,19 +59586,19 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SwapchainCreateInfoKHR & setImageFormat( vk::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
{
imageFormat = imageFormat_;
return *this;
}
- SwapchainCreateInfoKHR & setImageColorSpace( vk::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
{
imageColorSpace = imageColorSpace_;
return *this;
}
- SwapchainCreateInfoKHR & setImageExtent( vk::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
{
imageExtent = imageExtent_;
return *this;
@@ -63785,13 +59610,13 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SwapchainCreateInfoKHR & setImageUsage( vk::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
{
imageUsage = imageUsage_;
return *this;
}
- SwapchainCreateInfoKHR & setImageSharingMode( vk::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
{
imageSharingMode = imageSharingMode_;
return *this;
@@ -63809,31 +59634,31 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SwapchainCreateInfoKHR & setPreTransform( vk::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
{
preTransform = preTransform_;
return *this;
}
- SwapchainCreateInfoKHR & setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
{
compositeAlpha = compositeAlpha_;
return *this;
}
- SwapchainCreateInfoKHR & setPresentMode( vk::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
{
presentMode = presentMode_;
return *this;
}
- SwapchainCreateInfoKHR & setClipped( vk::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
{
clipped = clipped_;
return *this;
}
- SwapchainCreateInfoKHR & setOldSwapchain( vk::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
{
oldSwapchain = oldSwapchain_;
return *this;
@@ -63864,8 +59689,8 @@ namespace VULKAN_HPP_NAMESPACE
&& ( imageSharingMode == rhs.imageSharingMode )
&& ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
&& ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
- && vk::operator==( preTransform, rhs.preTransform )
- && vk::operator==( compositeAlpha, rhs.compositeAlpha )
+ && ( preTransform == rhs.preTransform )
+ && ( compositeAlpha == rhs.compositeAlpha )
&& ( presentMode == rhs.presentMode )
&& ( clipped == rhs.clipped )
&& ( oldSwapchain == rhs.oldSwapchain );
@@ -63876,53 +59701,49 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SwapchainCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+ uint32_t minImageCount = {};
+ VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+ VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+ VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+ uint32_t imageArrayLayers = {};
+ VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+ VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+ uint32_t queueFamilyIndexCount = {};
+ const uint32_t* pQueueFamilyIndices = {};
+ VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+ VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
+ VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
+ VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
+ VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
};
static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
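Because every setter returns *this, the sixteen-field create info reads naturally as one fluent chain. A sketch under the assumption that the surface and extent were obtained from earlier capability queries; the image count, format, and modes below are placeholders, not recommendations:

#include <vulkan/vulkan.hpp>

// Fill a swapchain create info in one fluent chain; fields not set keep the
// defaults shown in the member list above.
vk::SwapchainCreateInfoKHR makeSwapchainInfo( vk::SurfaceKHR surface, vk::Extent2D extent )
{
  return vk::SwapchainCreateInfoKHR()
      .setSurface( surface )
      .setMinImageCount( 3 )
      .setImageFormat( vk::Format::eB8G8R8A8Unorm )
      .setImageColorSpace( vk::ColorSpaceKHR::eSrgbNonlinear )
      .setImageExtent( extent )
      .setImageArrayLayers( 1 )
      .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
      .setImageSharingMode( vk::SharingMode::eExclusive )
      .setPreTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
      .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
      .setPresentMode( vk::PresentModeKHR::eFifo )
      .setClipped( VK_TRUE );
}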
- namespace layout
- {
- struct SwapchainDisplayNativeHdrCreateInfoAMD
- {
- protected:
- VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( vk::Bool32 localDimmingEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : localDimmingEnable( localDimmingEnable_ )
- {}
-
- SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>(this) = rhs;
- }
-
- SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
- const void* pNext = nullptr;
- vk::Bool32 localDimmingEnable;
- };
- static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "layout struct and wrapper have different size!" );
- }
-
- struct SwapchainDisplayNativeHdrCreateInfoAMD : public layout::SwapchainDisplayNativeHdrCreateInfoAMD
+ struct SwapchainDisplayNativeHdrCreateInfoAMD
{
- VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( vk::Bool32 localDimmingEnable_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::SwapchainDisplayNativeHdrCreateInfoAMD( localDimmingEnable_ )
+ VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+ : localDimmingEnable( localDimmingEnable_ )
{}
+ VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) - offsetof( SwapchainDisplayNativeHdrCreateInfoAMD, pNext ) );
+ return *this;
+ }
+
SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::SwapchainDisplayNativeHdrCreateInfoAMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::SwapchainDisplayNativeHdrCreateInfoAMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>(&rhs);
return *this;
}
@@ -63932,7 +59753,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( vk::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
+ SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
{
localDimmingEnable = localDimmingEnable_;
return *this;
@@ -63960,52 +59781,34 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::SwapchainDisplayNativeHdrCreateInfoAMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
};
static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
- namespace layout
+ struct TextureLODGatherFormatPropertiesAMD
{
- struct TextureLODGatherFormatPropertiesAMD
- {
- protected:
- TextureLODGatherFormatPropertiesAMD() VULKAN_HPP_NOEXCEPT
- {}
-
- TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>(this) = rhs;
- }
-
- TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
- void* pNext = nullptr;
- vk::Bool32 supportsTextureGatherLODBiasAMD;
- };
- static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "layout struct and wrapper have different size!" );
- }
-
- struct TextureLODGatherFormatPropertiesAMD : public layout::TextureLODGatherFormatPropertiesAMD
- {
- TextureLODGatherFormatPropertiesAMD() VULKAN_HPP_NOEXCEPT
- : layout::TextureLODGatherFormatPropertiesAMD()
+ TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT
+ : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
{}
+ VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD & operator=( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) - offsetof( TextureLODGatherFormatPropertiesAMD, pNext ) );
+ return *this;
+ }
+
TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::TextureLODGatherFormatPropertiesAMD( rhs )
- {}
+ {
+ *this = rhs;
+ }
TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::TextureLODGatherFormatPropertiesAMD::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>(&rhs);
return *this;
}
@@ -64031,109 +59834,84 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::TextureLODGatherFormatPropertiesAMD::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+ void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
};
static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
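TextureLODGatherFormatPropertiesAMD is pure output, which is why the rewrite gives it a plain value constructor in place of the old protected default one; it is filled by chaining it into a format-properties query. A hedged sketch, assuming the pointer-style getImageFormatProperties2 overload:

#include <vulkan/vulkan.hpp>

// Ask whether gather-with-LOD-bias works for a given format by hanging the
// AMD output struct off ImageFormatProperties2::pNext.
bool supportsGatherLodBias( vk::PhysicalDevice physicalDevice,
                            vk::PhysicalDeviceImageFormatInfo2 const & formatInfo )
{
  vk::TextureLODGatherFormatPropertiesAMD lodGatherProps;
  vk::ImageFormatProperties2 props;
  props.pNext = &lodGatherProps;
  (void)physicalDevice.getImageFormatProperties2( &formatInfo, &props );
  return lodGatherProps.supportsTextureGatherLODBiasAMD == VK_TRUE;
}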
- namespace layout
+ struct TimelineSemaphoreSubmitInfo
{
- struct TimelineSemaphoreSubmitInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfoKHR( uint32_t waitSemaphoreValueCount_ = 0,
- const uint64_t* pWaitSemaphoreValues_ = nullptr,
- uint32_t signalSemaphoreValueCount_ = 0,
- const uint64_t* pSignalSemaphoreValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : waitSemaphoreValueCount( waitSemaphoreValueCount_ )
- , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
- , signalSemaphoreValueCount( signalSemaphoreValueCount_ )
- , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
- {}
-
- TimelineSemaphoreSubmitInfoKHR( VkTimelineSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkTimelineSemaphoreSubmitInfoKHR*>(this) = rhs;
- }
-
- TimelineSemaphoreSubmitInfoKHR& operator=( VkTimelineSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkTimelineSemaphoreSubmitInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfoKHR;
- const void* pNext = nullptr;
- uint32_t waitSemaphoreValueCount;
- const uint64_t* pWaitSemaphoreValues;
- uint32_t signalSemaphoreValueCount;
- const uint64_t* pSignalSemaphoreValues;
- };
- static_assert( sizeof( TimelineSemaphoreSubmitInfoKHR ) == sizeof( VkTimelineSemaphoreSubmitInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct TimelineSemaphoreSubmitInfoKHR : public layout::TimelineSemaphoreSubmitInfoKHR
- {
- VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfoKHR( uint32_t waitSemaphoreValueCount_ = 0,
- const uint64_t* pWaitSemaphoreValues_ = nullptr,
- uint32_t signalSemaphoreValueCount_ = 0,
- const uint64_t* pSignalSemaphoreValues_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::TimelineSemaphoreSubmitInfoKHR( waitSemaphoreValueCount_, pWaitSemaphoreValues_, signalSemaphoreValueCount_, pSignalSemaphoreValues_ )
+ VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {},
+ const uint64_t* pWaitSemaphoreValues_ = {},
+ uint32_t signalSemaphoreValueCount_ = {},
+ const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
+ : waitSemaphoreValueCount( waitSemaphoreValueCount_ )
+ , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
+ , signalSemaphoreValueCount( signalSemaphoreValueCount_ )
+ , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
{}
- TimelineSemaphoreSubmitInfoKHR( VkTimelineSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::TimelineSemaphoreSubmitInfoKHR( rhs )
- {}
+ VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo & operator=( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) - offsetof( TimelineSemaphoreSubmitInfo, pNext ) );
+ return *this;
+ }
- TimelineSemaphoreSubmitInfoKHR& operator=( VkTimelineSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::TimelineSemaphoreSubmitInfoKHR::operator=(rhs);
+ *this = rhs;
+ }
+
+ TimelineSemaphoreSubmitInfo& operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>(&rhs);
return *this;
}
- TimelineSemaphoreSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+ TimelineSemaphoreSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
- TimelineSemaphoreSubmitInfoKHR & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+ TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
{
waitSemaphoreValueCount = waitSemaphoreValueCount_;
return *this;
}
- TimelineSemaphoreSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pWaitSemaphoreValues = pWaitSemaphoreValues_;
return *this;
}
- TimelineSemaphoreSubmitInfoKHR & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
+ TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
{
signalSemaphoreValueCount = signalSemaphoreValueCount_;
return *this;
}
- TimelineSemaphoreSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+ TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
{
pSignalSemaphoreValues = pSignalSemaphoreValues_;
return *this;
}
- operator VkTimelineSemaphoreSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+ operator VkTimelineSemaphoreSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfoKHR*>( this );
+ return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
}
- operator VkTimelineSemaphoreSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+ operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
{
- return *reinterpret_cast<VkTimelineSemaphoreSubmitInfoKHR*>( this );
+ return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
}
- bool operator==( TimelineSemaphoreSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator==( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
@@ -64143,66 +59921,46 @@ namespace VULKAN_HPP_NAMESPACE
&& ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
}
- bool operator!=( TimelineSemaphoreSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+ bool operator!=( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
- private:
- using layout::TimelineSemaphoreSubmitInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
+ const void* pNext = {};
+ uint32_t waitSemaphoreValueCount = {};
+ const uint64_t* pWaitSemaphoreValues = {};
+ uint32_t signalSemaphoreValueCount = {};
+ const uint64_t* pSignalSemaphoreValues = {};
};
- static_assert( sizeof( TimelineSemaphoreSubmitInfoKHR ) == sizeof( VkTimelineSemaphoreSubmitInfoKHR ), "struct and wrapper have different size!" );
- static_assert( std::is_standard_layout<TimelineSemaphoreSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+ static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+ static_assert( std::is_standard_layout<TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
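The KHR suffix disappears here because timeline semaphores were promoted to core in Vulkan 1.2; the header presumably keeps a TimelineSemaphoreSubmitInfoKHR compatibility alias elsewhere, though this hunk does not show it. A hedged sketch of signalling a timeline value at submit, with the queue and timeline semaphore assumed to exist:

#include <vulkan/vulkan.hpp>

// Chain the timeline payload off SubmitInfo::pNext; the value count must
// match the signal-semaphore count of the parent SubmitInfo.
void submitWithTimeline( vk::Queue queue, vk::Semaphore timeline )
{
  uint64_t signalValue = 2;
  vk::TimelineSemaphoreSubmitInfo timelineInfo = vk::TimelineSemaphoreSubmitInfo()
      .setSignalSemaphoreValueCount( 1 )
      .setPSignalSemaphoreValues( &signalValue );
  vk::SubmitInfo submitInfo = vk::SubmitInfo()
      .setPNext( &timelineInfo )
      .setSignalSemaphoreCount( 1 )
      .setPSignalSemaphores( &timeline );
  queue.submit( submitInfo, vk::Fence() );
}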
- namespace layout
+ struct ValidationCacheCreateInfoEXT
{
- struct ValidationCacheCreateInfoEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( vk::ValidationCacheCreateFlagsEXT flags_ = vk::ValidationCacheCreateFlagsEXT(),
- size_t initialDataSize_ = 0,
- const void* pInitialData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , initialDataSize( initialDataSize_ )
- , pInitialData( pInitialData_ )
- {}
-
- ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkValidationCacheCreateInfoEXT*>(this) = rhs;
- }
-
- ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkValidationCacheCreateInfoEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
- const void* pNext = nullptr;
- vk::ValidationCacheCreateFlagsEXT flags;
- size_t initialDataSize;
- const void* pInitialData;
- };
- static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ValidationCacheCreateInfoEXT : public layout::ValidationCacheCreateInfoEXT
- {
- VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( vk::ValidationCacheCreateFlagsEXT flags_ = vk::ValidationCacheCreateFlagsEXT(),
- size_t initialDataSize_ = 0,
- const void* pInitialData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ValidationCacheCreateInfoEXT( flags_, initialDataSize_, pInitialData_ )
+ VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
+ size_t initialDataSize_ = {},
+ const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , initialDataSize( initialDataSize_ )
+ , pInitialData( pInitialData_ )
{}
+ VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) - offsetof( ValidationCacheCreateInfoEXT, pNext ) );
+ return *this;
+ }
+
ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ValidationCacheCreateInfoEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ValidationCacheCreateInfoEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>(&rhs);
return *this;
}
@@ -64212,7 +59970,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ValidationCacheCreateInfoEXT & setFlags( vk::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -64254,65 +60012,42 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ValidationCacheCreateInfoEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
+ size_t initialDataSize = {};
+ const void* pInitialData = {};
};
static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
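A hedged usage sketch: seeding a new validation cache from a blob saved on a previous run. Loading the blob and the createValidationCacheEXT call are assumptions about the surrounding application, not part of this hunk:

#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <vector>

// Recreate a validation cache from previously serialized data; an empty blob
// simply yields a fresh cache.
vk::ValidationCacheEXT restoreValidationCache( vk::Device device,
                                               std::vector<uint8_t> const & blob )
{
  vk::ValidationCacheCreateInfoEXT cacheInfo = vk::ValidationCacheCreateInfoEXT()
      .setInitialDataSize( blob.size() )
      .setPInitialData( blob.data() );
  return device.createValidationCacheEXT( cacheInfo );
}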
- namespace layout
- {
- struct ValidationFeaturesEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = 0,
- const vk::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = nullptr,
- uint32_t disabledValidationFeatureCount_ = 0,
- const vk::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : enabledValidationFeatureCount( enabledValidationFeatureCount_ )
- , pEnabledValidationFeatures( pEnabledValidationFeatures_ )
- , disabledValidationFeatureCount( disabledValidationFeatureCount_ )
- , pDisabledValidationFeatures( pDisabledValidationFeatures_ )
- {}
-
- ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkValidationFeaturesEXT*>(this) = rhs;
- }
-
- ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkValidationFeaturesEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eValidationFeaturesEXT;
- const void* pNext = nullptr;
- uint32_t enabledValidationFeatureCount;
- const vk::ValidationFeatureEnableEXT* pEnabledValidationFeatures;
- uint32_t disabledValidationFeatureCount;
- const vk::ValidationFeatureDisableEXT* pDisabledValidationFeatures;
- };
- static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ValidationFeaturesEXT : public layout::ValidationFeaturesEXT
+ struct ValidationFeaturesEXT
{
- VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = 0,
- const vk::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = nullptr,
- uint32_t disabledValidationFeatureCount_ = 0,
- const vk::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ValidationFeaturesEXT( enabledValidationFeatureCount_, pEnabledValidationFeatures_, disabledValidationFeatureCount_, pDisabledValidationFeatures_ )
+ VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {},
+ uint32_t disabledValidationFeatureCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+ : enabledValidationFeatureCount( enabledValidationFeatureCount_ )
+ , pEnabledValidationFeatures( pEnabledValidationFeatures_ )
+ , disabledValidationFeatureCount( disabledValidationFeatureCount_ )
+ , pDisabledValidationFeatures( pDisabledValidationFeatures_ )
{}
+ VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) - offsetof( ValidationFeaturesEXT, pNext ) );
+ return *this;
+ }
+
ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ValidationFeaturesEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ValidationFeaturesEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>(&rhs);
return *this;
}
@@ -64328,7 +60063,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ValidationFeaturesEXT & setPEnabledValidationFeatures( const vk::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+ ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pEnabledValidationFeatures = pEnabledValidationFeatures_;
return *this;
@@ -64340,7 +60075,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ValidationFeaturesEXT & setPDisabledValidationFeatures( const vk::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+ ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
{
pDisabledValidationFeatures = pDisabledValidationFeatures_;
return *this;
@@ -64371,57 +60106,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ValidationFeaturesEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
+ const void* pNext = {};
+ uint32_t enabledValidationFeatureCount = {};
+ const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures = {};
+ uint32_t disabledValidationFeatureCount = {};
+ const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures = {};
};
static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
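ValidationFeaturesEXT is consumed at instance creation. A hedged sketch turning on GPU-assisted validation; layer selection and application info are elided, and eGpuAssisted is assumed from VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:

#include <vulkan/vulkan.hpp>

// Enable GPU-assisted validation by chaining the features struct off
// InstanceCreateInfo::pNext before the instance is created.
vk::Instance makeDebugInstance()
{
  static const vk::ValidationFeatureEnableEXT enables[] = {
    vk::ValidationFeatureEnableEXT::eGpuAssisted
  };
  vk::ValidationFeaturesEXT features = vk::ValidationFeaturesEXT()
      .setEnabledValidationFeatureCount( 1 )
      .setPEnabledValidationFeatures( enables );
  vk::InstanceCreateInfo instanceInfo = vk::InstanceCreateInfo()
      .setPNext( &features );
  return vk::createInstance( instanceInfo );
}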
- namespace layout
+ struct ValidationFlagsEXT
{
- struct ValidationFlagsEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0,
- const vk::ValidationCheckEXT* pDisabledValidationChecks_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : disabledValidationCheckCount( disabledValidationCheckCount_ )
- , pDisabledValidationChecks( pDisabledValidationChecks_ )
- {}
-
- ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkValidationFlagsEXT*>(this) = rhs;
- }
-
- ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkValidationFlagsEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eValidationFlagsEXT;
- const void* pNext = nullptr;
- uint32_t disabledValidationCheckCount;
- const vk::ValidationCheckEXT* pDisabledValidationChecks;
- };
- static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct ValidationFlagsEXT : public layout::ValidationFlagsEXT
- {
- VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0,
- const vk::ValidationCheckEXT* pDisabledValidationChecks_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ValidationFlagsEXT( disabledValidationCheckCount_, pDisabledValidationChecks_ )
+ VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {},
+ const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT
+ : disabledValidationCheckCount( disabledValidationCheckCount_ )
+ , pDisabledValidationChecks( pDisabledValidationChecks_ )
{}
+ VULKAN_HPP_NAMESPACE::ValidationFlagsEXT & operator=( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) - offsetof( ValidationFlagsEXT, pNext ) );
+ return *this;
+ }
+
ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ValidationFlagsEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ValidationFlagsEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>(&rhs);
return *this;
}
@@ -64437,7 +60154,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ValidationFlagsEXT & setPDisabledValidationChecks( const vk::ValidationCheckEXT* pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+ ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
{
pDisabledValidationChecks = pDisabledValidationChecks_;
return *this;
@@ -64466,59 +60183,39 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ValidationFlagsEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
+ const void* pNext = {};
+ uint32_t disabledValidationCheckCount = {};
+ const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks = {};
};
static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_VI_NN
- namespace layout
- {
- struct ViSurfaceCreateInfoNN
- {
- protected:
- VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( vk::ViSurfaceCreateFlagsNN flags_ = vk::ViSurfaceCreateFlagsNN(),
- void* window_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , window( window_ )
- {}
-
- ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkViSurfaceCreateInfoNN*>(this) = rhs;
- }
-
- ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkViSurfaceCreateInfoNN*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
- const void* pNext = nullptr;
- vk::ViSurfaceCreateFlagsNN flags;
- void* window;
- };
- static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "layout struct and wrapper have different size!" );
- }
-
- struct ViSurfaceCreateInfoNN : public layout::ViSurfaceCreateInfoNN
+ struct ViSurfaceCreateInfoNN
{
- VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( vk::ViSurfaceCreateFlagsNN flags_ = vk::ViSurfaceCreateFlagsNN(),
- void* window_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::ViSurfaceCreateInfoNN( flags_, window_ )
+ VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {},
+ void* window_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , window( window_ )
{}
+ VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & operator=( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) - offsetof( ViSurfaceCreateInfoNN, pNext ) );
+ return *this;
+ }
+
ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::ViSurfaceCreateInfoNN( rhs )
- {}
+ {
+ *this = rhs;
+ }
ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::ViSurfaceCreateInfoNN::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>(&rhs);
return *this;
}
@@ -64528,7 +60225,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- ViSurfaceCreateInfoNN & setFlags( vk::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
+ ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -64563,8 +60260,11 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::ViSurfaceCreateInfoNN::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
+ void* window = {};
};
static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
@@ -64572,55 +60272,30 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- namespace layout
+ struct WaylandSurfaceCreateInfoKHR
{
- struct WaylandSurfaceCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( vk::WaylandSurfaceCreateFlagsKHR flags_ = vk::WaylandSurfaceCreateFlagsKHR(),
- struct wl_display* display_ = nullptr,
- struct wl_surface* surface_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , display( display_ )
- , surface( surface_ )
- {}
-
- WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>(this) = rhs;
- }
-
- WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
- const void* pNext = nullptr;
- vk::WaylandSurfaceCreateFlagsKHR flags;
- struct wl_display* display;
- struct wl_surface* surface;
- };
- static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct WaylandSurfaceCreateInfoKHR : public layout::WaylandSurfaceCreateInfoKHR
- {
- VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( vk::WaylandSurfaceCreateFlagsKHR flags_ = vk::WaylandSurfaceCreateFlagsKHR(),
- struct wl_display* display_ = nullptr,
- struct wl_surface* surface_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::WaylandSurfaceCreateInfoKHR( flags_, display_, surface_ )
+ VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {},
+ struct wl_display* display_ = {},
+ struct wl_surface* surface_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , display( display_ )
+ , surface( surface_ )
{}
+ VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) - offsetof( WaylandSurfaceCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::WaylandSurfaceCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::WaylandSurfaceCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -64630,7 +60305,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- WaylandSurfaceCreateInfoKHR & setFlags( vk::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -64672,8 +60347,12 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::WaylandSurfaceCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
+ struct wl_display* display = {};
+ struct wl_surface* surface = {};
};
static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
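A hedged sketch of the Wayland path; the display and surface handles come from the application's Wayland session, and createWaylandSurfaceKHR is the matching instance-level call under the same platform guard:

#include <vulkan/vulkan.hpp>  // with VK_USE_PLATFORM_WAYLAND_KHR defined

// Wrap an existing Wayland surface in a VkSurfaceKHR.
vk::SurfaceKHR makeWaylandSurface( vk::Instance instance,
                                   struct wl_display * display,
                                   struct wl_surface * surface )
{
  vk::WaylandSurfaceCreateInfoKHR surfaceInfo = vk::WaylandSurfaceCreateInfoKHR()
      .setDisplay( display )
      .setSurface( surface );
  return instance.createWaylandSurfaceKHR( surfaceInfo );
}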
@@ -64681,71 +60360,38 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct Win32KeyedMutexAcquireReleaseInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0,
- const vk::DeviceMemory* pAcquireSyncs_ = nullptr,
- const uint64_t* pAcquireKeys_ = nullptr,
- const uint32_t* pAcquireTimeouts_ = nullptr,
- uint32_t releaseCount_ = 0,
- const vk::DeviceMemory* pReleaseSyncs_ = nullptr,
- const uint64_t* pReleaseKeys_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : acquireCount( acquireCount_ )
- , pAcquireSyncs( pAcquireSyncs_ )
- , pAcquireKeys( pAcquireKeys_ )
- , pAcquireTimeouts( pAcquireTimeouts_ )
- , releaseCount( releaseCount_ )
- , pReleaseSyncs( pReleaseSyncs_ )
- , pReleaseKeys( pReleaseKeys_ )
- {}
-
- Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>(this) = rhs;
- }
-
- Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
- const void* pNext = nullptr;
- uint32_t acquireCount;
- const vk::DeviceMemory* pAcquireSyncs;
- const uint64_t* pAcquireKeys;
- const uint32_t* pAcquireTimeouts;
- uint32_t releaseCount;
- const vk::DeviceMemory* pReleaseSyncs;
- const uint64_t* pReleaseKeys;
- };
- static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct Win32KeyedMutexAcquireReleaseInfoKHR : public layout::Win32KeyedMutexAcquireReleaseInfoKHR
+ struct Win32KeyedMutexAcquireReleaseInfoKHR
{
- VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = 0,
- const vk::DeviceMemory* pAcquireSyncs_ = nullptr,
- const uint64_t* pAcquireKeys_ = nullptr,
- const uint32_t* pAcquireTimeouts_ = nullptr,
- uint32_t releaseCount_ = 0,
- const vk::DeviceMemory* pReleaseSyncs_ = nullptr,
- const uint64_t* pReleaseKeys_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::Win32KeyedMutexAcquireReleaseInfoKHR( acquireCount_, pAcquireSyncs_, pAcquireKeys_, pAcquireTimeouts_, releaseCount_, pReleaseSyncs_, pReleaseKeys_ )
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {},
+ const uint64_t* pAcquireKeys_ = {},
+ const uint32_t* pAcquireTimeouts_ = {},
+ uint32_t releaseCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {},
+ const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
+ : acquireCount( acquireCount_ )
+ , pAcquireSyncs( pAcquireSyncs_ )
+ , pAcquireKeys( pAcquireKeys_ )
+ , pAcquireTimeouts( pAcquireTimeouts_ )
+ , releaseCount( releaseCount_ )
+ , pReleaseSyncs( pReleaseSyncs_ )
+ , pReleaseKeys( pReleaseKeys_ )
{}
+ VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) - offsetof( Win32KeyedMutexAcquireReleaseInfoKHR, pNext ) );
+ return *this;
+ }
+
Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::Win32KeyedMutexAcquireReleaseInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::Win32KeyedMutexAcquireReleaseInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>(&rhs);
return *this;
}
@@ -64761,7 +60407,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const vk::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireSyncs = pAcquireSyncs_;
return *this;
@@ -64785,7 +60431,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const vk::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseSyncs = pReleaseSyncs_;
return *this;
@@ -64825,8 +60471,16 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::Win32KeyedMutexAcquireReleaseInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+ const void* pNext = {};
+ uint32_t acquireCount = {};
+ const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
+ const uint64_t* pAcquireKeys = {};
+ const uint32_t* pAcquireTimeouts = {};
+ uint32_t releaseCount = {};
+ const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
+ const uint64_t* pReleaseKeys = {};
};
static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
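// A minimal usage sketch (assuming VK_USE_PLATFORM_WIN32_KHR and that `memory`,
// `acquireKey`, `releaseKey`, and `timeoutMs` already exist): with the layout::
// base class gone, the wrapper is one flat standard-layout struct whose members
// are all value-initialized via `= {}`.
  vk::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutex( 1, &memory, &acquireKey, &timeoutMs,
                                                       1, &memory, &releaseKey );
  vk::SubmitInfo submitInfo;
  submitInfo.pNext = &keyedMutex;   // keyed-mutex info rides the submit's pNext chain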
@@ -64834,71 +60488,38 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct Win32KeyedMutexAcquireReleaseInfoNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0,
- const vk::DeviceMemory* pAcquireSyncs_ = nullptr,
- const uint64_t* pAcquireKeys_ = nullptr,
- const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr,
- uint32_t releaseCount_ = 0,
- const vk::DeviceMemory* pReleaseSyncs_ = nullptr,
- const uint64_t* pReleaseKeys_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : acquireCount( acquireCount_ )
- , pAcquireSyncs( pAcquireSyncs_ )
- , pAcquireKeys( pAcquireKeys_ )
- , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
- , releaseCount( releaseCount_ )
- , pReleaseSyncs( pReleaseSyncs_ )
- , pReleaseKeys( pReleaseKeys_ )
- {}
-
- Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>(this) = rhs;
- }
-
- Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
- const void* pNext = nullptr;
- uint32_t acquireCount;
- const vk::DeviceMemory* pAcquireSyncs;
- const uint64_t* pAcquireKeys;
- const uint32_t* pAcquireTimeoutMilliseconds;
- uint32_t releaseCount;
- const vk::DeviceMemory* pReleaseSyncs;
- const uint64_t* pReleaseKeys;
- };
- static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "layout struct and wrapper have different size!" );
- }
-
- struct Win32KeyedMutexAcquireReleaseInfoNV : public layout::Win32KeyedMutexAcquireReleaseInfoNV
+ struct Win32KeyedMutexAcquireReleaseInfoNV
{
- VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0,
- const vk::DeviceMemory* pAcquireSyncs_ = nullptr,
- const uint64_t* pAcquireKeys_ = nullptr,
- const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr,
- uint32_t releaseCount_ = 0,
- const vk::DeviceMemory* pReleaseSyncs_ = nullptr,
- const uint64_t* pReleaseKeys_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::Win32KeyedMutexAcquireReleaseInfoNV( acquireCount_, pAcquireSyncs_, pAcquireKeys_, pAcquireTimeoutMilliseconds_, releaseCount_, pReleaseSyncs_, pReleaseKeys_ )
+ VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {},
+ const uint64_t* pAcquireKeys_ = {},
+ const uint32_t* pAcquireTimeoutMilliseconds_ = {},
+ uint32_t releaseCount_ = {},
+ const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {},
+ const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
+ : acquireCount( acquireCount_ )
+ , pAcquireSyncs( pAcquireSyncs_ )
+ , pAcquireKeys( pAcquireKeys_ )
+ , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
+ , releaseCount( releaseCount_ )
+ , pReleaseSyncs( pReleaseSyncs_ )
+ , pReleaseKeys( pReleaseKeys_ )
{}
+ VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV & operator=( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) - offsetof( Win32KeyedMutexAcquireReleaseInfoNV, pNext ) );
+ return *this;
+ }
+
Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::Win32KeyedMutexAcquireReleaseInfoNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::Win32KeyedMutexAcquireReleaseInfoNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>(&rhs);
return *this;
}
@@ -64914,7 +60535,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const vk::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pAcquireSyncs = pAcquireSyncs_;
return *this;
@@ -64938,7 +60559,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const vk::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+ Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
{
pReleaseSyncs = pReleaseSyncs_;
return *this;
@@ -64978,8 +60599,16 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::Win32KeyedMutexAcquireReleaseInfoNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+ const void* pNext = {};
+ uint32_t acquireCount = {};
+ const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
+ const uint64_t* pAcquireKeys = {};
+ const uint32_t* pAcquireTimeoutMilliseconds = {};
+ uint32_t releaseCount = {};
+ const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
+ const uint64_t* pReleaseKeys = {};
};
static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
@@ -64987,55 +60616,30 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
- namespace layout
- {
- struct Win32SurfaceCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( vk::Win32SurfaceCreateFlagsKHR flags_ = vk::Win32SurfaceCreateFlagsKHR(),
- HINSTANCE hinstance_ = 0,
- HWND hwnd_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , hinstance( hinstance_ )
- , hwnd( hwnd_ )
- {}
-
- Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>(this) = rhs;
- }
-
- Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
- const void* pNext = nullptr;
- vk::Win32SurfaceCreateFlagsKHR flags;
- HINSTANCE hinstance;
- HWND hwnd;
- };
- static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct Win32SurfaceCreateInfoKHR : public layout::Win32SurfaceCreateInfoKHR
+ struct Win32SurfaceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( vk::Win32SurfaceCreateFlagsKHR flags_ = vk::Win32SurfaceCreateFlagsKHR(),
- HINSTANCE hinstance_ = 0,
- HWND hwnd_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::Win32SurfaceCreateInfoKHR( flags_, hinstance_, hwnd_ )
+ VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {},
+ HINSTANCE hinstance_ = {},
+ HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , hinstance( hinstance_ )
+ , hwnd( hwnd_ )
{}
+ VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) - offsetof( Win32SurfaceCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::Win32SurfaceCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::Win32SurfaceCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -65045,7 +60649,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- Win32SurfaceCreateInfoKHR & setFlags( vk::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -65087,82 +60691,51 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::Win32SurfaceCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
+ HINSTANCE hinstance = {};
+ HWND hwnd = {};
};
static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
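// A usage sketch for the surface struct above, assuming VK_USE_PLATFORM_WIN32_KHR
// and application-provided `instance`, `hInstance`, and `hWnd`:
  vk::Win32SurfaceCreateInfoKHR surfaceInfo( {} /*flags*/, hInstance, hWnd );
  vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceInfo );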
- namespace layout
- {
- struct WriteDescriptorSet
- {
- protected:
- VULKAN_HPP_CONSTEXPR WriteDescriptorSet( vk::DescriptorSet dstSet_ = vk::DescriptorSet(),
- uint32_t dstBinding_ = 0,
- uint32_t dstArrayElement_ = 0,
- uint32_t descriptorCount_ = 0,
- vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler,
- const vk::DescriptorImageInfo* pImageInfo_ = nullptr,
- const vk::DescriptorBufferInfo* pBufferInfo_ = nullptr,
- const vk::BufferView* pTexelBufferView_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : dstSet( dstSet_ )
- , dstBinding( dstBinding_ )
- , dstArrayElement( dstArrayElement_ )
- , descriptorCount( descriptorCount_ )
- , descriptorType( descriptorType_ )
- , pImageInfo( pImageInfo_ )
- , pBufferInfo( pBufferInfo_ )
- , pTexelBufferView( pTexelBufferView_ )
- {}
-
- WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWriteDescriptorSet*>(this) = rhs;
- }
-
- WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWriteDescriptorSet*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eWriteDescriptorSet;
- const void* pNext = nullptr;
- vk::DescriptorSet dstSet;
- uint32_t dstBinding;
- uint32_t dstArrayElement;
- uint32_t descriptorCount;
- vk::DescriptorType descriptorType;
- const vk::DescriptorImageInfo* pImageInfo;
- const vk::DescriptorBufferInfo* pBufferInfo;
- const vk::BufferView* pTexelBufferView;
- };
- static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "layout struct and wrapper have different size!" );
- }
-
- struct WriteDescriptorSet : public layout::WriteDescriptorSet
- {
- VULKAN_HPP_CONSTEXPR WriteDescriptorSet( vk::DescriptorSet dstSet_ = vk::DescriptorSet(),
- uint32_t dstBinding_ = 0,
- uint32_t dstArrayElement_ = 0,
- uint32_t descriptorCount_ = 0,
- vk::DescriptorType descriptorType_ = vk::DescriptorType::eSampler,
- const vk::DescriptorImageInfo* pImageInfo_ = nullptr,
- const vk::DescriptorBufferInfo* pBufferInfo_ = nullptr,
- const vk::BufferView* pTexelBufferView_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::WriteDescriptorSet( dstSet_, dstBinding_, dstArrayElement_, descriptorCount_, descriptorType_, pImageInfo_, pBufferInfo_, pTexelBufferView_ )
+ struct WriteDescriptorSet
+ {
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
+ uint32_t dstBinding_ = {},
+ uint32_t dstArrayElement_ = {},
+ uint32_t descriptorCount_ = {},
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+ const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {},
+ const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {},
+ const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT
+ : dstSet( dstSet_ )
+ , dstBinding( dstBinding_ )
+ , dstArrayElement( dstArrayElement_ )
+ , descriptorCount( descriptorCount_ )
+ , descriptorType( descriptorType_ )
+ , pImageInfo( pImageInfo_ )
+ , pBufferInfo( pBufferInfo_ )
+ , pTexelBufferView( pTexelBufferView_ )
{}
+ VULKAN_HPP_NAMESPACE::WriteDescriptorSet & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) - offsetof( WriteDescriptorSet, pNext ) );
+ return *this;
+ }
+
WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::WriteDescriptorSet( rhs )
- {}
+ {
+ *this = rhs;
+ }
WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::WriteDescriptorSet::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>(&rhs);
return *this;
}
@@ -65172,7 +60745,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- WriteDescriptorSet & setDstSet( vk::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
{
dstSet = dstSet_;
return *this;
@@ -65196,25 +60769,25 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- WriteDescriptorSet & setDescriptorType( vk::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
{
descriptorType = descriptorType_;
return *this;
}
- WriteDescriptorSet & setPImageInfo( const vk::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
{
pImageInfo = pImageInfo_;
return *this;
}
- WriteDescriptorSet & setPBufferInfo( const vk::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
{
pBufferInfo = pBufferInfo_;
return *this;
}
- WriteDescriptorSet & setPTexelBufferView( const vk::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
{
pTexelBufferView = pTexelBufferView_;
return *this;
@@ -65249,57 +60822,43 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::WriteDescriptorSet::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+ uint32_t dstBinding = {};
+ uint32_t dstArrayElement = {};
+ uint32_t descriptorCount = {};
+ VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+ const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
+ const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
+ const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
};
static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
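// A minimal sketch of the setter chain, which still returns *this after the
// rework; `device`, `descriptorSet`, and `bufferInfo` are assumed to exist:
  vk::WriteDescriptorSet write;
  write.setDstSet( descriptorSet )
       .setDstBinding( 0 )
       .setDescriptorCount( 1 )
       .setDescriptorType( vk::DescriptorType::eUniformBuffer )
       .setPBufferInfo( &bufferInfo );
  device.updateDescriptorSets( write, nullptr );   // single write, no copies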
- namespace layout
+ struct WriteDescriptorSetAccelerationStructureNV
{
- struct WriteDescriptorSetAccelerationStructureNV
- {
- protected:
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = 0,
- const vk::AccelerationStructureNV* pAccelerationStructures_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : accelerationStructureCount( accelerationStructureCount_ )
- , pAccelerationStructures( pAccelerationStructures_ )
- {}
-
- WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>(this) = rhs;
- }
-
- WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
- const void* pNext = nullptr;
- uint32_t accelerationStructureCount;
- const vk::AccelerationStructureNV* pAccelerationStructures;
- };
- static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "layout struct and wrapper have different size!" );
- }
-
- struct WriteDescriptorSetAccelerationStructureNV : public layout::WriteDescriptorSetAccelerationStructureNV
- {
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = 0,
- const vk::AccelerationStructureNV* pAccelerationStructures_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::WriteDescriptorSetAccelerationStructureNV( accelerationStructureCount_, pAccelerationStructures_ )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {},
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
+ : accelerationStructureCount( accelerationStructureCount_ )
+ , pAccelerationStructures( pAccelerationStructures_ )
{}
+ VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) - offsetof( WriteDescriptorSetAccelerationStructureNV, pNext ) );
+ return *this;
+ }
+
WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::WriteDescriptorSetAccelerationStructureNV( rhs )
- {}
+ {
+ *this = rhs;
+ }
WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::WriteDescriptorSetAccelerationStructureNV::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>(&rhs);
return *this;
}
@@ -65315,7 +60874,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const vk::AccelerationStructureNV* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+ WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
{
pAccelerationStructures = pAccelerationStructures_;
return *this;
@@ -65344,57 +60903,37 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::WriteDescriptorSetAccelerationStructureNV::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+ const void* pNext = {};
+ uint32_t accelerationStructureCount = {};
+ const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures = {};
};
static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureNV>::value, "struct wrapper is not a standard layout!" );
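// A sketch of the intended use: this struct only takes effect when chained
// behind a WriteDescriptorSet of type eAccelerationStructureNV (`as` and
// `descriptorSet` are assumed handles):
  vk::WriteDescriptorSetAccelerationStructureNV asWrite( 1, &as );
  vk::WriteDescriptorSet write;
  write.setPNext( &asWrite )
       .setDstSet( descriptorSet )
       .setDescriptorCount( 1 )
       .setDescriptorType( vk::DescriptorType::eAccelerationStructureNV );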
- namespace layout
+ struct WriteDescriptorSetInlineUniformBlockEXT
{
- struct WriteDescriptorSetInlineUniformBlockEXT
- {
- protected:
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = 0,
- const void* pData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : dataSize( dataSize_ )
- , pData( pData_ )
- {}
-
- WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>(this) = rhs;
- }
-
- WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
- const void* pNext = nullptr;
- uint32_t dataSize;
- const void* pData;
- };
- static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "layout struct and wrapper have different size!" );
- }
-
- struct WriteDescriptorSetInlineUniformBlockEXT : public layout::WriteDescriptorSetInlineUniformBlockEXT
- {
- VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = 0,
- const void* pData_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : layout::WriteDescriptorSetInlineUniformBlockEXT( dataSize_, pData_ )
+ VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = {},
+ const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
+ : dataSize( dataSize_ )
+ , pData( pData_ )
{}
+ VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT & operator=( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT ) - offsetof( WriteDescriptorSetInlineUniformBlockEXT, pNext ) );
+ return *this;
+ }
+
WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::WriteDescriptorSetInlineUniformBlockEXT( rhs )
- {}
+ {
+ *this = rhs;
+ }
WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::WriteDescriptorSetInlineUniformBlockEXT::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const *>(&rhs);
return *this;
}
@@ -65439,63 +60978,41 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::WriteDescriptorSetInlineUniformBlockEXT::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
+ const void* pNext = {};
+ uint32_t dataSize = {};
+ const void* pData = {};
};
static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value, "struct wrapper is not a standard layout!" );
#ifdef VK_USE_PLATFORM_XCB_KHR
- namespace layout
+ struct XcbSurfaceCreateInfoKHR
{
- struct XcbSurfaceCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( vk::XcbSurfaceCreateFlagsKHR flags_ = vk::XcbSurfaceCreateFlagsKHR(),
- xcb_connection_t* connection_ = nullptr,
- xcb_window_t window_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , connection( connection_ )
- , window( window_ )
- {}
-
- XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>(this) = rhs;
- }
-
- XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
- const void* pNext = nullptr;
- vk::XcbSurfaceCreateFlagsKHR flags;
- xcb_connection_t* connection;
- xcb_window_t window;
- };
- static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct XcbSurfaceCreateInfoKHR : public layout::XcbSurfaceCreateInfoKHR
- {
- VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( vk::XcbSurfaceCreateFlagsKHR flags_ = vk::XcbSurfaceCreateFlagsKHR(),
- xcb_connection_t* connection_ = nullptr,
- xcb_window_t window_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::XcbSurfaceCreateInfoKHR( flags_, connection_, window_ )
+ VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {},
+ xcb_connection_t* connection_ = {},
+ xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , connection( connection_ )
+ , window( window_ )
{}
+ VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) - offsetof( XcbSurfaceCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::XcbSurfaceCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::XcbSurfaceCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -65505,7 +61022,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- XcbSurfaceCreateInfoKHR & setFlags( vk::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -65547,8 +61064,12 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::XcbSurfaceCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
+ xcb_connection_t* connection = {};
+ xcb_window_t window = {};
};
static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
@@ -65556,55 +61077,30 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_KHR
- namespace layout
- {
- struct XlibSurfaceCreateInfoKHR
- {
- protected:
- VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( vk::XlibSurfaceCreateFlagsKHR flags_ = vk::XlibSurfaceCreateFlagsKHR(),
- Display* dpy_ = nullptr,
- Window window_ = 0 ) VULKAN_HPP_NOEXCEPT
- : flags( flags_ )
- , dpy( dpy_ )
- , window( window_ )
- {}
-
- XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>(this) = rhs;
- }
-
- XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- {
- *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>(this) = rhs;
- return *this;
- }
-
- public:
- vk::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
- const void* pNext = nullptr;
- vk::XlibSurfaceCreateFlagsKHR flags;
- Display* dpy;
- Window window;
- };
- static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "layout struct and wrapper have different size!" );
- }
-
- struct XlibSurfaceCreateInfoKHR : public layout::XlibSurfaceCreateInfoKHR
+ struct XlibSurfaceCreateInfoKHR
{
- VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( vk::XlibSurfaceCreateFlagsKHR flags_ = vk::XlibSurfaceCreateFlagsKHR(),
- Display* dpy_ = nullptr,
- Window window_ = 0 ) VULKAN_HPP_NOEXCEPT
- : layout::XlibSurfaceCreateInfoKHR( flags_, dpy_, window_ )
+ VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {},
+ Display* dpy_ = {},
+ Window window_ = {} ) VULKAN_HPP_NOEXCEPT
+ : flags( flags_ )
+ , dpy( dpy_ )
+ , window( window_ )
{}
+ VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & operator=( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+ {
+ memcpy( &pNext, &rhs.pNext, sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) - offsetof( XlibSurfaceCreateInfoKHR, pNext ) );
+ return *this;
+ }
+
XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
- : layout::XlibSurfaceCreateInfoKHR( rhs )
- {}
+ {
+ *this = rhs;
+ }
XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
- layout::XlibSurfaceCreateInfoKHR::operator=(rhs);
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>(&rhs);
return *this;
}
@@ -65614,7 +61110,7 @@ namespace VULKAN_HPP_NAMESPACE
return *this;
}
- XlibSurfaceCreateInfoKHR & setFlags( vk::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+ XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
@@ -65656,23 +61152,27 @@ namespace VULKAN_HPP_NAMESPACE
return !operator==( rhs );
}
- private:
- using layout::XlibSurfaceCreateInfoKHR::sType;
+ public:
+ const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
+ const void* pNext = {};
+ VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
+ Display* dpy = {};
+ Window window = {};
};
static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result createInstance( const vk::InstanceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Instance* pInstance, Dispatch const &d)
+ VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
{
- vk::Instance instance;
+ VULKAN_HPP_NAMESPACE::Instance instance;
Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" );
}
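// Call-side sketch of the enhanced-mode overload above: with exceptions
// enabled, ResultValueType<Instance>::type unpacks to a plain vk::Instance.
  vk::ApplicationInfo appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_1 );
  vk::InstanceCreateInfo createInfo( {} /*flags*/, &appInfo );
  vk::Instance instance = vk::createInstance( createInfo );   // throws vk::SystemError on failure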
@@ -65680,7 +61180,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
{
- vk::Instance instance;
+ VULKAN_HPP_NAMESPACE::Instance instance;
Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
@@ -65690,7 +61190,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, vk::ExtensionProperties* pProperties, Dispatch const &d)
+ VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
}
@@ -65742,7 +61242,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, vk::LayerProperties* pProperties, Dispatch const &d)
+ VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
}
@@ -65794,13 +61294,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d)
+ VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d) VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d )
+ VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d )
{
uint32_t apiVersion;
Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
@@ -65809,13 +61309,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
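// Call-side usage is unchanged by the `typename` spelling above; on a
// Vulkan 1.1+ loader the enhanced-mode overload returns the packed version:
  uint32_t apiVersion = vk::enumerateInstanceVersion();
  uint32_t major = VK_VERSION_MAJOR( apiVersion );   // 1 for any 1.x loader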
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::begin( const vk::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" );
@@ -65823,7 +61323,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const vk::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( pConditionalRenderingBegin ) );
}
@@ -65836,7 +61336,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
}
@@ -65850,13 +61350,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginQuery( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginQuery( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
@@ -65864,52 +61364,65 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, vk::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const vk::RenderPassBeginInfo* pRenderPassBegin, vk::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, vk::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const vk::RenderPassBeginInfo* pRenderPassBegin, const vk::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo*>( pSubpassBeginInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo*>( &subpassBeginInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ) );
+ d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo*>( pSubpassBeginInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ) );
+ d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo*>( &subpassBeginInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
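// beginRenderPass2 is the core promotion of beginRenderPass2KHR, and both now
// take the un-suffixed SubpassBeginInfo. A sketch assuming `cmd` and
// `renderPassBegin` exist:
  vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
  cmd.beginRenderPass2( renderPassBegin, subpassBegin );   // use beginRenderPass2KHR pre-1.2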
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const vk::Buffer* pCounterBuffers, const vk::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize*>( pCounterBufferOffsets ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const vk::Buffer> counterBuffers, ArrayProxy<const vk::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
@@ -65924,13 +61437,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const vk::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
}
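// The ArrayProxy overload above lets a single set (or an initializer list)
// bind without explicit counts; `cmd`, `pipelineLayout`, and `set` are
// assumed handles:
  cmd.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout,
                          0 /*firstSet*/, set, nullptr /*no dynamic offsets*/ );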
@@ -65938,13 +61451,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( vk::Buffer buffer, vk::DeviceSize offset, vk::IndexType indexType, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( vk::Buffer buffer, vk::DeviceSize offset, vk::IndexType indexType, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
}
@@ -65952,13 +61465,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( vk::PipelineBindPoint pipelineBindPoint, vk::Pipeline pipeline, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( vk::PipelineBindPoint pipelineBindPoint, vk::Pipeline pipeline, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
@@ -65966,26 +61479,26 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( vk::ImageView imageView, vk::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( vk::ImageView imageView, vk::ImageLayout imageLayout, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const vk::Buffer* pBuffers, const vk::DeviceSize* pOffsets, const vk::DeviceSize* pSizes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), reinterpret_cast<const VkDeviceSize*>( pOffsets ), reinterpret_cast<const VkDeviceSize*>( pSizes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const vk::Buffer> buffers, ArrayProxy<const vk::DeviceSize> offsets, ArrayProxy<const vk::DeviceSize> sizes, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> sizes, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
@@ -66016,13 +61529,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const vk::Buffer* pBuffers, const vk::DeviceSize* pOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), reinterpret_cast<const VkDeviceSize*>( pOffsets ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const vk::Buffer> buffers, ArrayProxy<const vk::DeviceSize> offsets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> offsets, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
@@ -66037,65 +61550,65 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageBlit* pRegions, vk::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::blitImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy<const vk::ImageBlit> regions, vk::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const vk::AccelerationStructureInfoNV* pInfo, vk::Buffer instanceData, vk::DeviceSize instanceOffset, vk::Bool32 update, vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::Buffer scratch, vk::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, vk::Buffer instanceData, vk::DeviceSize instanceOffset, vk::Bool32 update, vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::Buffer scratch, vk::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV*>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const vk::ClearAttachment* pAttachments, uint32_t rectCount, const vk::ClearRect* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const vk::ClearAttachment> attachments, ArrayProxy<const vk::ClearRect> rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
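
// Usage sketch (illustrative): clearing color attachment 0 over a given extent
// from inside a render pass; `cmd` and `extent` are hypothetical.
inline void clearColorAttachment0( vk::CommandBuffer cmd, vk::Extent2D extent )
{
  vk::ClearAttachment attachment( vk::ImageAspectFlagBits::eColor, 0,
                                  vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  vk::ClearRect rect( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ), 0, 1 );
  cmd.clearAttachments( { attachment }, { rect } );
}
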
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( vk::Image image, vk::ImageLayout imageLayout, const vk::ClearColorValue* pColor, uint32_t rangeCount, const vk::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( vk::Image image, vk::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const vk::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
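
// Usage sketch (illustrative): the enhanced overload takes the clear color by
// reference and the subresource ranges as an ArrayProxy; the (hypothetical)
// image is assumed to already be in eTransferDstOptimal layout.
inline void clearToBlack( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
  cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal,
                       vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ),
                       { range } );
}
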
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( vk::Image image, vk::ImageLayout imageLayout, const vk::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const vk::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( vk::Image image, vk::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const vk::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
}
@@ -66103,65 +61616,65 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::CopyAccelerationStructureModeNV mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( vk::AccelerationStructureNV dst, vk::AccelerationStructureNV src, vk::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeNV>( mode ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( vk::Buffer srcBuffer, vk::Buffer dstBuffer, uint32_t regionCount, const vk::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( vk::Buffer srcBuffer, vk::Buffer dstBuffer, ArrayProxy<const vk::BufferCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
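
// Usage sketch (illustrative): a single whole-buffer copy region; `size` is the
// byte count to copy and all handles are hypothetical.
inline void copyWholeBuffer( vk::CommandBuffer cmd, vk::Buffer src, vk::Buffer dst, vk::DeviceSize size )
{
  cmd.copyBuffer( src, dst, { vk::BufferCopy( 0 /*srcOffset*/, 0 /*dstOffset*/, size ) } );
}
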
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( vk::Buffer srcBuffer, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( vk::Buffer srcBuffer, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy<const vk::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
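
// Usage sketch (illustrative): uploading a tightly packed staging buffer into
// mip 0 / layer 0 of a color image already in eTransferDstOptimal layout.
inline void uploadMip0( vk::CommandBuffer cmd, vk::Buffer staging, vk::Image image, vk::Extent3D extent )
{
  vk::BufferImageCopy region( 0, 0, 0, // bufferOffset, rowLength, imageHeight (0 = tightly packed)
                              vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
                              vk::Offset3D( 0, 0, 0 ), extent );
  cmd.copyBufferToImage( staging, image, vk::ImageLayout::eTransferDstOptimal, { region } );
}
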
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy<const vk::ImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Buffer dstBuffer, uint32_t regionCount, const vk::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Buffer dstBuffer, ArrayProxy<const vk::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
}
@@ -66169,20 +61682,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
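
// Usage sketch (illustrative): copying `queryCount` 64-bit results into a
// buffer, waiting until they are available; all handles are hypothetical.
inline void copyTimestamps( vk::CommandBuffer cmd, vk::QueryPool pool, uint32_t queryCount, vk::Buffer dst )
{
  cmd.copyQueryPoolResults( pool, 0, queryCount, dst, 0, sizeof( uint64_t ),
                            vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
}
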
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const vk::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
}
@@ -66209,7 +61722,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const vk::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
}
@@ -66265,13 +61778,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( vk::Buffer buffer, vk::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( vk::Buffer buffer, vk::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
}
@@ -66307,13 +61820,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
@@ -66321,13 +61834,27 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
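// Usage sketch (illustrative): the unsuffixed entry point added above is the
// Vulkan 1.2 promotion of the KHR/AMD variants below, so it needs a 1.2
// device; the argument and count buffers are hypothetical, GPU-filled buffers.
inline void drawFromGpuCount( vk::CommandBuffer cmd, vk::Buffer args, vk::Buffer count, uint32_t maxDraws )
{
  cmd.drawIndexedIndirectCount( args, 0, count, 0, maxDraws,
                                sizeof( vk::DrawIndexedIndirectCommand ) );
}
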
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
@@ -66335,13 +61862,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
@@ -66349,13 +61876,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
@@ -66363,13 +61890,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, vk::Buffer counterBuffer, vk::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, vk::Buffer counterBuffer, vk::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
}
@@ -66377,13 +61904,27 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
@@ -66391,13 +61932,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
@@ -66405,13 +61946,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( vk::Buffer buffer, vk::DeviceSize offset, vk::Buffer countBuffer, vk::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
}
@@ -66419,13 +61960,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( vk::Buffer buffer, vk::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
}
@@ -66475,13 +62016,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endQuery( vk::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endQuery( vk::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
}
@@ -66489,13 +62030,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( vk::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
}
@@ -66516,26 +62057,39 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const vk::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
+ d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo*>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
+ d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo*>( &subpassEndInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const vk::Buffer* pCounterBuffers, const vk::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo*>( pSubpassEndInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo*>( &subpassEndInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
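// Usage sketch (illustrative): after this change the KHR entry point reuses the
// core VkSubpassEndInfo type (the KHR struct is now an alias), so both
// spellings take the same argument; `cmd` is a hypothetical handle.
inline void endPass2( vk::CommandBuffer cmd )
{
  cmd.endRenderPass2( vk::SubpassEndInfo() );      // core, Vulkan 1.2
  // cmd.endRenderPass2KHR( vk::SubpassEndInfo() ); // VK_KHR_create_renderpass2
}
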
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize*>( pCounterBufferOffsets ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const vk::Buffer> counterBuffers, ArrayProxy<const vk::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
@@ -66550,13 +62104,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const vk::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
}
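
// Usage sketch (illustrative): replaying one secondary command buffer from a
// primary one via the ArrayProxy overload; both handles are hypothetical.
inline void runSecondary( vk::CommandBuffer primary, vk::CommandBuffer secondary )
{
  primary.executeCommands( { secondary } );
}
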
@@ -66564,20 +62118,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize size, uint32_t data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
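
// Usage sketch (illustrative): zero-filling an entire (hypothetical) buffer;
// VK_WHOLE_SIZE comes from the underlying C header.
inline void zeroFill( vk::CommandBuffer cmd, vk::Buffer buffer )
{
  cmd.fillBuffer( buffer, 0, VK_WHOLE_SIZE, 0u );
}
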
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
}
@@ -66591,46 +62145,59 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( vk::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( vk::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const vk::SubpassBeginInfoKHR* pSubpassBeginInfo, const vk::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo*>( pSubpassEndInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo*>( &subpassEndInfo ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
+ d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo*>( pSubpassEndInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
+ d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo*>( &subpassEndInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, vk::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const vk::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const vk::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const vk::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, vk::DependencyFlags dependencyFlags, ArrayProxy<const vk::MemoryBarrier> memoryBarriers, ArrayProxy<const vk::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const vk::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
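
// Usage sketch (illustrative): a typical upload-to-sample transition through
// the ArrayProxy overload; empty barrier lists can be passed as nullptr. The
// image and `cmd` are hypothetical handles.
inline void transitionForSampling( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead,
                                  vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal,
                                  VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
                                  vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
                       {}, nullptr, nullptr, { barrier } );
}
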
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const vk::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
}
@@ -66643,26 +62210,26 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushConstants( vk::PipelineLayout layout, vk::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushConstants( vk::PipelineLayout layout, vk::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
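
// Usage sketch (illustrative): the templated overload computes the byte size
// as values.size() * sizeof(T); the result must still match a push-constant
// range declared in the (hypothetical) pipeline layout.
inline void pushTint( vk::CommandBuffer cmd, vk::PipelineLayout layout )
{
  cmd.pushConstants<float>( layout, vk::ShaderStageFlagBits::eFragment, 0,
                            { 1.0f, 0.5f, 0.25f, 1.0f } ); // 16 bytes
}
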
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const vk::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( vk::PipelineBindPoint pipelineBindPoint, vk::PipelineLayout layout, uint32_t set, ArrayProxy<const vk::WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
}
@@ -66670,20 +62237,20 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, vk::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, vk::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const vk::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
}
@@ -66697,13 +62264,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resetEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resetEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
@@ -66711,26 +62278,26 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resolveImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, uint32_t regionCount, const vk::ImageResolve* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::resolveImage( vk::Image srcImage, vk::ImageLayout srcImageLayout, vk::Image dstImage, vk::ImageLayout dstImageLayout, ArrayProxy<const vk::ImageResolve> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
}
@@ -66765,13 +62332,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const vk::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( pCustomSampleOrders ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( vk::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const vk::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size() , reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( customSampleOrders.data() ) );
}
@@ -66834,13 +62401,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const vk::Rect2D* pDiscardRectangles, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D*>( pDiscardRectangles ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const vk::Rect2D> discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
}
@@ -66848,26 +62415,26 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setEvent( vk::Event event, vk::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const vk::Rect2D* pExclusiveScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D*>( pExclusiveScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const vk::Rect2D> exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast<const VkRect2D*>( exclusiveScissors.data() ) );
}
@@ -66902,13 +62469,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const vk::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( &markerInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" );
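// ---- Editor's sketch (not part of the diff): with exceptions enabled,
// `typename ResultValueType<void>::type` is plain `void`, so the enhanced
// overload above throws a vk::SystemError on failure instead of returning a
// vk::Result. `cmd` and the marker value are hypothetical.
void markFrame( vk::CommandBuffer cmd, uint64_t frameIndex )
{
  vk::PerformanceMarkerInfoINTEL markerInfo;
  markerInfo.marker = frameIndex;               // user-chosen 64-bit marker
  cmd.setPerformanceMarkerINTEL( markerInfo );  // throws on a non-success VkResult
}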
@@ -66916,13 +62483,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const vk::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( pOverrideInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( &overrideInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" );
@@ -66930,13 +62497,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const vk::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( pMarkerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( &markerInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" );
@@ -66944,7 +62511,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const vk::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( pSampleLocationsInfo ) );
}
@@ -66957,13 +62524,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const vk::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const vk::Rect2D> scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
}
@@ -66971,13 +62538,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( vk::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( vk::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
}
@@ -66985,13 +62552,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( vk::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( vk::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
}
@@ -66999,52 +62566,52 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( vk::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( vk::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const vk::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const vk::Viewport> viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const vk::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV*>( pShadingRatePalettes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const vk::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast<const VkShadingRatePaletteNV*>( shadingRatePalettes.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const vk::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV*>( pViewportWScalings ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const vk::ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
}
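// ---- Editor's sketch (not part of the diff): ArrayProxy accepts an
// initializer list, a single element, or a std::vector, so the enhanced
// overloads above avoid explicit count/pointer pairs. Assumes
// <vulkan/vulkan.hpp> and <vector>; all values are placeholders.
void setBaseDynamicState( vk::CommandBuffer cmd )
{
  vk::Viewport viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f );
  cmd.setViewport( 0, { viewport } );                             // initializer list
  std::vector<vk::Rect2D> scissors = { vk::Rect2D( { 0, 0 }, { 1280, 720 } ) };
  cmd.setScissor( 0, scissors );                                  // std::vector
}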
@@ -67052,52 +62619,52 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( vk::Buffer raygenShaderBindingTableBuffer, vk::DeviceSize raygenShaderBindingOffset, vk::Buffer missShaderBindingTableBuffer, vk::DeviceSize missShaderBindingOffset, vk::DeviceSize missShaderBindingStride, vk::Buffer hitShaderBindingTableBuffer, vk::DeviceSize hitShaderBindingOffset, vk::DeviceSize hitShaderBindingStride, vk::Buffer callableShaderBindingTableBuffer, vk::DeviceSize callableShaderBindingOffset, vk::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( vk::Buffer raygenShaderBindingTableBuffer, vk::DeviceSize raygenShaderBindingOffset, vk::Buffer missShaderBindingTableBuffer, vk::DeviceSize missShaderBindingOffset, vk::DeviceSize missShaderBindingStride, vk::Buffer hitShaderBindingTableBuffer, vk::DeviceSize hitShaderBindingOffset, vk::DeviceSize hitShaderBindingStride, vk::Buffer callableShaderBindingTableBuffer, vk::DeviceSize callableShaderBindingOffset, vk::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, vk::DeviceSize dataSize, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( vk::Buffer dstBuffer, vk::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const vk::Event* pEvents, vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const vk::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const vk::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const vk::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const vk::Event> events, vk::PipelineStageFlags srcStageMask, vk::PipelineStageFlags dstStageMask, ArrayProxy<const vk::MemoryBarrier> memoryBarriers, ArrayProxy<const vk::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const vk::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
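// ---- Editor's sketch (not part of the diff): in the enhanced waitEvents
// overload above each barrier list is an ArrayProxy, and `nullptr` passes an
// empty proxy. Stage masks and the barrier itself are illustrative only.
void waitForTransfer( vk::CommandBuffer cmd, vk::Event event,
                      vk::BufferMemoryBarrier const & barrier )
{
  cmd.waitEvents( { event },
                  vk::PipelineStageFlagBits::eTransfer,        // srcStageMask
                  vk::PipelineStageFlagBits::eFragmentShader,  // dstStageMask
                  nullptr,       // no global memory barriers
                  { barrier },   // one buffer memory barrier
                  nullptr );     // no image memory barriers
}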
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const vk::AccelerationStructureNV* pAccelerationStructures, vk::QueryType queryType, vk::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const vk::AccelerationStructureNV> accelerationStructures, vk::QueryType queryType, vk::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureNV*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
}
@@ -67105,13 +62672,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( vk::PipelineStageFlagBits pipelineStage, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( vk::PipelineStageFlagBits pipelineStage, vk::Buffer dstBuffer, vk::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
}
@@ -67119,13 +62686,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( vk::PipelineStageFlagBits pipelineStage, vk::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( vk::PipelineStageFlagBits pipelineStage, vk::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
@@ -67133,13 +62700,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const
+ VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
@@ -67148,13 +62715,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result CommandBuffer::reset( vk::CommandBufferResetFlags flags, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::reset( vk::CommandBufferResetFlags flags, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
@@ -67164,13 +62731,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" );
@@ -67179,7 +62746,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const vk::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
}
@@ -67194,13 +62761,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( vk::SwapchainKHR swapchain, uint64_t timeout, vk::Semaphore semaphore, vk::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( vk::SwapchainKHR swapchain, uint64_t timeout, vk::Semaphore semaphore, vk::Fence fence, Dispatch const &d ) const
+ VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
{
uint32_t imageIndex;
Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
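// ---- Editor's sketch (not part of the diff): the enhanced overload above
// returns ResultValue<uint32_t> rather than ResultValueType<...>::type,
// since codes such as eSuboptimalKHR are success values the caller still has
// to inspect. Handles below are hypothetical.
uint32_t acquireImage( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore sem )
{
  vk::ResultValue<uint32_t> rv =
      device.acquireNextImageKHR( swapchain, UINT64_MAX, sem, nullptr );
  if ( rv.result == vk::Result::eSuboptimalKHR )
  {
    // still usable, but the swapchain should be recreated when convenient
  }
  return rv.value;  // index of the acquired swapchain image
}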
@@ -67209,22 +62776,36 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const vk::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, vk::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( pAcquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( pConfiguration ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const
{
- vk::PerformanceConfigurationINTEL configuration;
+ VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( &configuration ) ) );
return createResultValue( result, configuration, VULKAN_HPP_NAMESPACE_STRING"::Device::acquirePerformanceConfigurationINTEL" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const vk::CommandBufferAllocateInfo* pAllocateInfo, vk::CommandBuffer* pCommandBuffers, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( pInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( &info ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireProfilingLockKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
}
@@ -67252,7 +62833,7 @@ namespace VULKAN_HPP_NAMESPACE
commandBuffers.reserve( allocateInfo.commandBufferCount );
CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle<CommandBuffer, Dispatch> ) - sizeof( CommandBuffer ) ) );
Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
@@ -67271,7 +62852,7 @@ namespace VULKAN_HPP_NAMESPACE
commandBuffers.reserve( allocateInfo.commandBufferCount );
CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle<CommandBuffer, Dispatch> ) - sizeof( CommandBuffer ) ) );
Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
@@ -67286,7 +62867,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const vk::DescriptorSetAllocateInfo* pAllocateInfo, vk::DescriptorSet* pDescriptorSets, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
}
@@ -67314,7 +62895,7 @@ namespace VULKAN_HPP_NAMESPACE
descriptorSets.reserve( allocateInfo.descriptorSetCount );
DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle<DescriptorSet, Dispatch> ) - sizeof( DescriptorSet ) ) );
Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
@@ -67333,7 +62914,7 @@ namespace VULKAN_HPP_NAMESPACE
descriptorSets.reserve( allocateInfo.descriptorSetCount );
DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle<DescriptorSet, Dispatch> ) - sizeof( DescriptorSet ) ) );
Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
@@ -67348,15 +62929,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::allocateMemory( const vk::MemoryAllocateInfo* pAllocateInfo, const vk::AllocationCallbacks* pAllocator, vk::DeviceMemory* pMemory, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DeviceMemory memory;
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory;
Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" );
}
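// ---- Editor's sketch (not part of the diff): on success the enhanced
// overload above unwraps the handle, so `ResultValueType<DeviceMemory>::type`
// is just vk::DeviceMemory when exceptions are enabled; errors throw. The
// allocation size and memory type index are placeholders.
vk::DeviceMemory allocateStaging( vk::Device device, uint32_t memoryTypeIndex )
{
  vk::MemoryAllocateInfo allocateInfo( 64 * 1024, memoryTypeIndex );  // 64 KiB
  return device.allocateMemory( allocateInfo );                      // throws on failure
}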
@@ -67364,7 +62945,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DeviceMemory memory;
+ VULKAN_HPP_NAMESPACE::DeviceMemory memory;
Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
ObjectFree<Device,Dispatch> deleter( *this, allocator, d );
@@ -67374,13 +62955,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const vk::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const vk::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" );
@@ -67389,13 +62970,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindBufferMemory( vk::Buffer buffer, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory( vk::Buffer buffer, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" );
@@ -67403,13 +62984,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const vk::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const vk::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" );
@@ -67417,13 +62998,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const vk::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const vk::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" );
@@ -67432,13 +63013,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindImageMemory( vk::Image image, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory( vk::Image image, vk::DeviceMemory memory, vk::DeviceSize memoryOffset, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" );
@@ -67446,13 +63027,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
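// The added `typename` keyword is required wherever ResultValueType<T>::type is
// a dependent name, i.e. where T involves a template parameter (as in the
// std::vector<Pipeline,Allocator> returns further down); the generator now
// emits it uniformly on every overload for consistency. A sketch of the trait,
// assuming the shape it has in vulkan.hpp of this era:
//
//   template <typename T>
//   struct ResultValueType
//   {
//   #ifdef VULKAN_HPP_NO_EXCEPTIONS
//     typedef ResultValue<T> type;   // value and Result travel together
//   #else
//     typedef T type;                // errors are thrown; only the value returns
//   #endif
//   };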
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const vk::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const vk::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" );
@@ -67460,13 +63041,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
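// Call sites are unaffected; only the spelling of the parameter types changed.
// A minimal, hypothetical use of the enhanced ArrayProxy overload (assuming
// exceptions are enabled, so the void-returning overload throws on failure):
//
//   std::array<vk::BindImageMemoryInfo, 2> infos = { /* ... */ };
//   device.bindImageMemory2( infos );           // ArrayProxy from a container
//   device.bindImageMemory2( { singleInfo } );  // or from an initializer list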
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const vk::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const vk::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" );
@@ -67475,13 +63056,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::compileDeferredNV( vk::Pipeline pipeline, uint32_t shader, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::compileDeferredNV( vk::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" );
@@ -67489,15 +63070,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const vk::AccelerationStructureCreateInfoNV* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkAccelerationStructureNV*>( pAccelerationStructure ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::AccelerationStructureNV>::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::AccelerationStructureNV accelerationStructure;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNV" );
}
@@ -67505,7 +63086,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::AccelerationStructureNV accelerationStructure;
+ VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67515,15 +63096,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createBuffer( const vk::BufferCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Buffer* pBuffer, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Buffer buffer;
+ VULKAN_HPP_NAMESPACE::Buffer buffer;
Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" );
}
@@ -67531,7 +63112,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Buffer buffer;
+ VULKAN_HPP_NAMESPACE::Buffer buffer;
Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67541,15 +63122,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createBufferView( const vk::BufferViewCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::BufferView* pView, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::BufferView view;
+ VULKAN_HPP_NAMESPACE::BufferView view;
Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" );
}
@@ -67557,7 +63138,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::BufferView view;
+ VULKAN_HPP_NAMESPACE::BufferView view;
Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67567,15 +63148,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createCommandPool( const vk::CommandPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::CommandPool* pCommandPool, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::CommandPool commandPool;
+ VULKAN_HPP_NAMESPACE::CommandPool commandPool;
Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" );
}
@@ -67583,7 +63164,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::CommandPool commandPool;
+ VULKAN_HPP_NAMESPACE::CommandPool commandPool;
Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67593,27 +63174,27 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
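// The *Unique variants wrap the raw handle in a UniqueHandle whose deleter
// (ObjectDestroy<Device,Dispatch>) captures the device, the allocator and the
// dispatcher, so destruction happens automatically. A minimal usage sketch,
// assuming default dispatch and exceptions enabled:
//
//   vk::UniqueCommandPool pool = device.createCommandPoolUnique(
//       vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlags(), queueFamilyIndex ) );
//   // pool.get() yields the vk::CommandPool; vkDestroyCommandPool runs when
//   // pool goes out of scope.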
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createComputePipelines( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::ComputePipelineCreateInfo* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( vk::PipelineCache pipelineCache, ArrayProxy<const vk::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( vk::PipelineCache pipelineCache, ArrayProxy<const vk::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createComputePipeline( vk::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
@@ -67621,14 +63202,14 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
pipelines.reserve( createInfos.size() );
Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
@@ -67640,14 +63221,14 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
pipelines.reserve( createInfos.size() );
Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
@@ -67659,7 +63240,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createComputePipelineUnique( vk::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
@@ -67671,15 +63252,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
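// createComputePipelinesUnique avoids a second allocation with a small aliasing
// trick: it reserves the UniqueHandle vector up front, lets Vulkan write the
// raw VkPipeline handles into the tail of that reserved storage (the
// static_assert above guarantees they fit), and then, in the success branch,
// wraps each buffer[i] in a UniqueHandle front-to-back, so every raw handle is
// consumed before its bytes are overwritten. The eSuccess comparison is the
// same mechanical rename: vk::Result becomes VULKAN_HPP_NAMESPACE::Result.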
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createDescriptorPool( const vk::DescriptorPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorPool* pDescriptorPool, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DescriptorPool descriptorPool;
+ VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" );
}
@@ -67687,7 +63268,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DescriptorPool descriptorPool;
+ VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67697,15 +63278,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorSetLayout* pSetLayout, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DescriptorSetLayout setLayout;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" );
}
@@ -67713,7 +63294,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DescriptorSetLayout setLayout;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67723,15 +63304,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const vk::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DescriptorUpdateTemplate descriptorUpdateTemplate;
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" );
}
@@ -67739,7 +63320,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DescriptorUpdateTemplate descriptorUpdateTemplate;
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67749,15 +63330,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const vk::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DescriptorUpdateTemplate descriptorUpdateTemplate;
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" );
}
@@ -67765,7 +63346,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DescriptorUpdateTemplate descriptorUpdateTemplate;
+ VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67775,15 +63356,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createEvent( const vk::EventCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Event* pEvent, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Event event;
+ VULKAN_HPP_NAMESPACE::Event event;
Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" );
}
@@ -67791,7 +63372,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Event,Dispatch>>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Event event;
+ VULKAN_HPP_NAMESPACE::Event event;
Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67801,15 +63382,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createFence( const vk::FenceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Fence fence;
+ VULKAN_HPP_NAMESPACE::Fence fence;
Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" );
}
@@ -67817,7 +63398,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Fence fence;
+ VULKAN_HPP_NAMESPACE::Fence fence;
Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67827,15 +63408,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createFramebuffer( const vk::FramebufferCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Framebuffer* pFramebuffer, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Framebuffer framebuffer;
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" );
}
@@ -67843,7 +63424,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Framebuffer framebuffer;
+ VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67853,27 +63434,27 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::GraphicsPipelineCreateInfo* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( vk::PipelineCache pipelineCache, ArrayProxy<const vk::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( vk::PipelineCache pipelineCache, ArrayProxy<const vk::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createGraphicsPipeline( vk::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
@@ -67881,14 +63462,14 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
pipelines.reserve( createInfos.size() );
Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
@@ -67900,14 +63481,14 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
pipelines.reserve( createInfos.size() );
Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
@@ -67919,7 +63500,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createGraphicsPipelineUnique( vk::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
@@ -67931,15 +63512,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
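// The graphics-pipeline hunks mirror the compute ones exactly. A hypothetical
// call site for the singular convenience overloads (assuming exceptions are
// enabled, so the Pipeline is returned directly):
//
//   vk::Pipeline pipeline    = device.createGraphicsPipeline( pipelineCache, createInfo );
//   vk::UniquePipeline owned = device.createGraphicsPipelineUnique( pipelineCache, createInfo );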
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createImage( const vk::ImageCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Image* pImage, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Image image;
+ VULKAN_HPP_NAMESPACE::Image image;
Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" );
}
@@ -67947,7 +63528,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Image,Dispatch>>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Image image;
+ VULKAN_HPP_NAMESPACE::Image image;
Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67957,15 +63538,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createImageView( const vk::ImageViewCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ImageView* pView, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::ImageView view;
+ VULKAN_HPP_NAMESPACE::ImageView view;
Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" );
}
@@ -67973,7 +63554,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::ImageView view;
+ VULKAN_HPP_NAMESPACE::ImageView view;
Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -67983,15 +63564,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const vk::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::IndirectCommandsLayoutNVX indirectCommandsLayout;
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" );
}
@@ -67999,7 +63580,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::IndirectCommandsLayoutNVX indirectCommandsLayout;
+ VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68009,15 +63590,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const vk::ObjectTableCreateInfoNVX* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ObjectTableNVX* pObjectTable, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::ObjectTableNVX objectTable;
+ VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" );
}
@@ -68025,7 +63606,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::ObjectTableNVX objectTable;
+ VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68035,15 +63616,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createPipelineCache( const vk::PipelineCacheCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::PipelineCache* pPipelineCache, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::PipelineCache pipelineCache;
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" );
}
@@ -68051,7 +63632,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::PipelineCache pipelineCache;
+ VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68061,15 +63642,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createPipelineLayout( const vk::PipelineLayoutCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::PipelineLayout* pPipelineLayout, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::PipelineLayout pipelineLayout;
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" );
}
@@ -68077,7 +63658,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::PipelineLayout pipelineLayout;
+ VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68087,15 +63668,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createQueryPool( const vk::QueryPoolCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::QueryPool* pQueryPool, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::QueryPool queryPool;
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool;
Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" );
}
@@ -68103,7 +63684,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::QueryPool queryPool;
+ VULKAN_HPP_NAMESPACE::QueryPool queryPool;
Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68113,27 +63694,27 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, uint32_t createInfoCount, const vk::RayTracingPipelineCreateInfoNV* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::Pipeline* pPipelines, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, ArrayProxy<const vk::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( vk::PipelineCache pipelineCache, ArrayProxy<const vk::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
}
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createRayTracingPipelineNV( vk::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
@@ -68141,14 +63722,14 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
pipelines.reserve( createInfos.size() );
Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
@@ -68160,14 +63741,14 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( vk::PipelineCache pipelineCache, ArrayProxy<const vk::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
pipelines.reserve( createInfos.size() );
Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
@@ -68179,7 +63760,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
}
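The ...NVUnique batch creators above first reserve the vector, then aim the C call at the tail of the reserved storage (the static_assert guarantees a raw Pipeline fits inside a UniqueHandle), and only wrap the handles in deleters once the call reports eSuccess. Caller-side it reduces to the following, assuming VK_NV_ray_tracing is enabled and `cache` and `infos` are valid:

    std::vector<vk::UniquePipeline> pipelines =
        device.createRayTracingPipelinesNVUnique( cache, infos );
    // each element destroys its VkPipeline when the vector goes out of scope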
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createRayTracingPipelineNVUnique( vk::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
@@ -68191,15 +63772,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createRenderPass( const vk::RenderPassCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::RenderPass* pRenderPass, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::RenderPass renderPass;
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" );
}
@@ -68207,7 +63788,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::RenderPass renderPass;
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68217,24 +63798,50 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const vk::RenderPassCreateInfo2KHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::RenderPass* pRenderPass, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+ return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2" );
+ }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+
+ ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+ return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2Unique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+ return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::RenderPass renderPass;
- Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHR" );
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::RenderPass renderPass;
- Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+ VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+ Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHRUnique", deleter );
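createRenderPass2 is the Vulkan 1.2 core promotion of VK_KHR_create_renderpass2; both entry points consume the same RenderPassCreateInfo2 chain, so a caller can pick whichever the device supports. A minimal sketch with a single empty subpass:

    vk::SubpassDescription2 subpass;
    subpass.pipelineBindPoint = vk::PipelineBindPoint::eGraphics;

    vk::RenderPassCreateInfo2 rpInfo;
    rpInfo.subpassCount = 1;
    rpInfo.pSubpasses   = &subpass;

    vk::UniqueRenderPass rp = device.createRenderPass2Unique( rpInfo );   // core path, Vulkan >= 1.2
    // or device.createRenderPass2KHRUnique( rpInfo ) on the extension path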
@@ -68243,15 +63850,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSampler( const vk::SamplerCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Sampler* pSampler, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Sampler sampler;
+ VULKAN_HPP_NAMESPACE::Sampler sampler;
Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" );
}
@@ -68259,7 +63866,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Sampler sampler;
+ VULKAN_HPP_NAMESPACE::Sampler sampler;
Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68269,15 +63876,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const vk::SamplerYcbcrConversionCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SamplerYcbcrConversion ycbcrConversion;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" );
}
@@ -68285,7 +63892,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SamplerYcbcrConversion ycbcrConversion;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68295,15 +63902,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const vk::SamplerYcbcrConversionCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SamplerYcbcrConversion ycbcrConversion;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" );
}
@@ -68311,7 +63918,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SamplerYcbcrConversion ycbcrConversion;
+ VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68321,15 +63928,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSemaphore( const vk::SemaphoreCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Semaphore* pSemaphore, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Semaphore semaphore;
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore;
Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" );
}
@@ -68337,7 +63944,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Semaphore semaphore;
+ VULKAN_HPP_NAMESPACE::Semaphore semaphore;
Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68347,15 +63954,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createShaderModule( const vk::ShaderModuleCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ShaderModule* pShaderModule, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::ShaderModule shaderModule;
+ VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" );
}
@@ -68363,7 +63970,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::ShaderModule shaderModule;
+ VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68373,27 +63980,27 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const vk::SwapchainCreateInfoKHR* pCreateInfos, const vk::AllocationCallbacks* pAllocator, vk::SwapchainKHR* pSwapchains, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const vk::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const vk::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size(), vectorAllocator );
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
}
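ArrayProxy lets the batch overload accept a std::vector, std::array, initializer list, or a single element without extra overloads; for example, assuming two fully populated create infos `infoA` and `infoB`:

    std::array<vk::SwapchainCreateInfoKHR, 2> infos = { infoA, infoB };
    std::vector<vk::SwapchainKHR> swapchains = device.createSharedSwapchainsKHR( infos );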
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
@@ -68401,14 +64008,14 @@ namespace VULKAN_HPP_NAMESPACE
}
#ifndef VULKAN_HPP_NO_SMART_HANDLE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const vk::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle<SwapchainKHR, Dispatch> ), "SwapchainKHR is greater than UniqueHandle<SwapchainKHR, Dispatch>!" );
std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> swapchainKHRs;
swapchainKHRs.reserve( createInfos.size() );
SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle<SwapchainKHR, Dispatch> ) - sizeof( SwapchainKHR ) ) );
Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
@@ -68420,14 +64027,14 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const vk::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
{
static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle<SwapchainKHR, Dispatch> ), "SwapchainKHR is greater than UniqueHandle<SwapchainKHR, Dispatch>!" );
std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> swapchainKHRs( vectorAllocator );
swapchainKHRs.reserve( createInfos.size() );
SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle<SwapchainKHR, Dispatch> ) - sizeof( SwapchainKHR ) ) );
Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
- if (result == vk::Result::eSuccess)
+ if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
for ( size_t i=0 ; i<createInfos.size() ; i++ )
@@ -68451,15 +64058,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const vk::SwapchainCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SwapchainKHR* pSwapchain, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SwapchainKHR swapchain;
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" );
}
@@ -68467,7 +64074,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SwapchainKHR swapchain;
+ VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68477,15 +64084,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const vk::ValidationCacheCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::ValidationCacheEXT* pValidationCache, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkValidationCacheEXT*>( pValidationCache ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ValidationCacheEXT>::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::ValidationCacheEXT validationCache;
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" );
}
@@ -68493,7 +64100,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::ValidationCacheEXT validationCache;
+ VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
@@ -68503,13 +64110,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const vk::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" );
@@ -68517,13 +64124,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const vk::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" );
@@ -68531,202 +64138,202 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
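// A minimal sketch of the debug-marker setters above, assuming the
// VK_EXT_debug_marker device extension is enabled and `buffer` is a live
// vk::Buffer; the object-name string and variable names are illustrative:
//
//   void nameBuffer( vk::Device device, vk::Buffer buffer )
//   {
//     vk::DebugMarkerObjectNameInfoEXT nameInfo(
//         vk::DebugReportObjectTypeEXT::eBuffer,
//         uint64_t( static_cast<VkBuffer>( buffer ) ),
//         "illustrative-buffer-name" );
//     device.debugMarkerSetObjectNameEXT( nameInfo );  // returns void; throws on error
//   }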
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( vk::AccelerationStructureNV accelerationStructure, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( vk::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::AccelerationStructureNV accelerationStructure, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
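// Each handle type below follows the pattern above: a named destroyX overload
// plus a generic destroy overload resolved by the handle's static type, both
// routing to the same vkDestroy* entry point. A sketch, assuming `as` is a live
// vk::AccelerationStructureNV created on `device` (use one form or the other,
// not both, since each releases the handle):
//
//   device.destroyAccelerationStructureNV( as );  // explicit form
//   // ...or, equivalently, via the generic overload:
//   device.destroy( as );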
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBuffer( vk::Buffer buffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBuffer( vk::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Buffer buffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBufferView( vk::BufferView bufferView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyBufferView( vk::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::BufferView bufferView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyCommandPool( vk::CommandPool commandPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyCommandPool( vk::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::CommandPool commandPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorPool( vk::DescriptorPool descriptorPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorPool( vk::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::DescriptorPool descriptorPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( vk::DescriptorSetLayout descriptorSetLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( vk::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::DescriptorSetLayout descriptorSetLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( vk::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
@@ -68739,481 +64346,481 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
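// Note that the allocator-only destroy overload above tears down the device
// itself (vkDestroyDevice), unlike the handle-taking overloads before and
// after it. A sketch, assuming the matching enhanced-mode overload (elided by
// the hunk above) with its allocator defaulted, and that all child objects
// were destroyed first:
//
//   device.destroy();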
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyEvent( vk::Event event, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyEvent( vk::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Event event, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyFence( vk::Fence fence, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyFence( vk::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Fence fence, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyFramebuffer( vk::Framebuffer framebuffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyFramebuffer( vk::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Framebuffer framebuffer, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyImage( vk::Image image, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyImage( vk::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Image image, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyImageView( vk::ImageView imageView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyImageView( vk::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::ImageView imageView, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( vk::ObjectTableNVX objectTable, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( vk::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::ObjectTableNVX objectTable, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipeline( vk::Pipeline pipeline, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipeline( vk::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Pipeline pipeline, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineCache( vk::PipelineCache pipelineCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineCache( vk::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::PipelineCache pipelineCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineLayout( vk::PipelineLayout pipelineLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyPipelineLayout( vk::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::PipelineLayout pipelineLayout, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyQueryPool( vk::QueryPool queryPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyQueryPool( vk::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::QueryPool queryPool, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyRenderPass( vk::RenderPass renderPass, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyRenderPass( vk::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::RenderPass renderPass, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySampler( vk::Sampler sampler, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySampler( vk::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Sampler sampler, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( vk::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( vk::SamplerYcbcrConversion ycbcrConversion, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( vk::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySemaphore( vk::Semaphore semaphore, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySemaphore( vk::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Semaphore semaphore, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyShaderModule( vk::ShaderModule shaderModule, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyShaderModule( vk::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::ShaderModule shaderModule, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySwapchainKHR( vk::SwapchainKHR swapchain, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroySwapchainKHR( vk::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::SwapchainKHR swapchain, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
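// A minimal usage sketch for the unified destroy overloads above; assumes a
// valid vk::Device and a vk::SwapchainKHR created elsewhere. Omitting the
// allocator argument selects the default Optional<const AllocationCallbacks>
// of nullptr, i.e. vkDestroySwapchainKHR with pAllocator == NULL.
#include <vulkan/vulkan.hpp>

void destroySwapchainExample( vk::Device device, vk::SwapchainKHR swapchain )
{
  device.destroy( swapchain );                 // type-dispatched overload
  // device.destroySwapchainKHR( swapchain );  // equivalent named form
}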
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( vk::ValidationCacheEXT validationCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( vk::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::ValidationCacheEXT validationCache, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::destroy( vk::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
@@ -69221,13 +64828,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
@@ -69235,13 +64842,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
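// Sketch of the two waitIdle flavours above; assumes exceptions are enabled
// (the default), so the enhanced-mode call's ResultValueType<void>::type is
// void and failure surfaces as a thrown vk::SystemError.
#include <vulkan/vulkan.hpp>

void drainDeviceExample( vk::Device device )
{
  device.waitIdle();  // throws on anything other than vk::Result::eSuccess
}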
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( vk::DisplayKHR display, const vk::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::displayPowerControlEXT( vk::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" );
@@ -69249,13 +64856,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
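// Sketch for displayPowerControlEXT above; requires VK_EXT_display_control
// and a vk::DisplayKHR obtained from the physical-device display queries.
#include <vulkan/vulkan.hpp>

void displayOffExample( vk::Device device, vk::DisplayKHR display )
{
  device.displayPowerControlEXT(
      display, vk::DisplayPowerInfoEXT( vk::DisplayPowerStateEXT::eOff ) );
}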
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const vk::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const vk::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
@@ -69263,39 +64870,39 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
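// Sketch of the ArrayProxy overload above: a single vk::MappedMemoryRange (or
// a std::vector / initializer list of them) binds without an explicit count.
// Assumes `memory` is a mapped, host-visible, non-coherent allocation.
#include <vulkan/vulkan.hpp>

void flushExample( vk::Device device, vk::DeviceMemory memory )
{
  vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  device.flushMappedMemoryRanges( range );  // count/pointer derived by ArrayProxy
}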
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::freeCommandBuffers( vk::CommandPool commandPool, uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::freeCommandBuffers( vk::CommandPool commandPool, ArrayProxy<const vk::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( vk::CommandPool commandPool, uint32_t commandBufferCount, const vk::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( vk::CommandPool commandPool, ArrayProxy<const vk::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
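// Sketch showing that vk::Device::free is a name-unified alias for
// freeCommandBuffers, mirroring the destroy overloads. Assumes `buffers`
// were allocated from the same `pool`.
#include <vector>
#include <vulkan/vulkan.hpp>

void recycleExample( vk::Device device, vk::CommandPool pool,
                     std::vector<vk::CommandBuffer> const & buffers )
{
  device.freeCommandBuffers( pool, buffers );  // ArrayProxy binds the vector
  // device.free( pool, buffers );             // identical call via the alias
}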
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::freeDescriptorSets( vk::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::freeDescriptorSets( vk::DescriptorPool descriptorPool, ArrayProxy<const vk::DescriptorSet> descriptorSets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
@@ -69303,13 +64910,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::free( vk::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const vk::DescriptorSet* pDescriptorSets, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::free( vk::DescriptorPool descriptorPool, ArrayProxy<const vk::DescriptorSet> descriptorSets, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" );
@@ -69317,39 +64924,39 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
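// Sketch for the freeDescriptorSets overloads above: valid only when the pool
// was created with vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet;
// otherwise sets are recycled wholesale with resetDescriptorPool.
#include <vector>
#include <vulkan/vulkan.hpp>

void freeSetsExample( vk::Device device, vk::DescriptorPool pool,
                      std::vector<vk::DescriptorSet> const & sets )
{
  device.freeDescriptorSets( pool, sets );
}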
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::freeMemory( vk::DeviceMemory memory, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::freeMemory( vk::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( vk::DeviceMemory memory, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::free( vk::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( vk::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::getAccelerationStructureHandleNV( vk::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" );
@@ -69357,15 +64964,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
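// Sketch of the templated ArrayProxy<T> overload above: the handle is written
// into caller-provided storage of data.size() * sizeof(T) bytes. Assumes
// VK_NV_ray_tracing is enabled and `as` is a built acceleration structure.
#include <cstdint>
#include <vulkan/vulkan.hpp>

uint64_t handleExample( vk::Device device, vk::AccelerationStructureNV as )
{
  uint64_t handle = 0;
  device.getAccelerationStructureHandleNV( as, vk::ArrayProxy<uint64_t>( handle ) );
  return handle;
}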
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const vk::AccelerationStructureMemoryRequirementsInfoNV* pInfo, vk::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR*>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::MemoryRequirements2KHR memoryRequirements;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
return memoryRequirements;
}
@@ -69373,7 +64980,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::MemoryRequirements2KHR& memoryRequirements = structureChain.template get<vk::MemoryRequirements2KHR>();
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
return structureChain;
}
@@ -69381,15 +64988,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, vk::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
{
- vk::AndroidHardwareBufferPropertiesANDROID properties;
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
}
@@ -69397,7 +65004,7 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const
{
StructureChain<X, Y, Z...> structureChain;
- vk::AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get<vk::AndroidHardwareBufferPropertiesANDROID>();
+ VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( &properties ) ) );
return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
}
@@ -69405,43 +65012,69 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const vk::BufferDeviceAddressInfoEXT* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfoEXT*>( pInfo ) ) );
+ return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfoEXT & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfoEXT*>( &info ) );
+ return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
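// Sketch of the promoted buffer-device-address entry points added above: the
// core getBufferAddress plus the KHR/EXT aliases, all now sharing the core
// VkBufferDeviceAddressInfo struct. Assumes the buffer was created with
// vk::BufferUsageFlagBits::eShaderDeviceAddress and the feature is enabled.
#include <vulkan/vulkan.hpp>

vk::DeviceAddress addressExample( vk::Device device, vk::Buffer buffer )
{
  return device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );
  // getBufferAddressKHR / getBufferAddressEXT take the same info struct
}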
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( vk::Buffer buffer, vk::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::MemoryRequirements Device::getBufferMemoryRequirements( vk::Buffer buffer, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::MemoryRequirements memoryRequirements;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
return memoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const vk::BufferMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::MemoryRequirements2 memoryRequirements;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
return memoryRequirements;
}
@@ -69449,22 +65082,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::MemoryRequirements2& memoryRequirements = structureChain.template get<vk::MemoryRequirements2>();
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
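// Sketch of the StructureChain overload above: chaining
// vk::MemoryDedicatedRequirements behind vk::MemoryRequirements2 reports the
// base requirements and the dedicated-allocation preference in one call.
#include <vulkan/vulkan.hpp>

bool prefersDedicatedExample( vk::Device device, vk::Buffer buffer )
{
  auto chain = device.getBufferMemoryRequirements2<
      vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
      vk::BufferMemoryRequirementsInfo2( buffer ) );
  return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
}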
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const vk::BufferMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::MemoryRequirements2 memoryRequirements;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
return memoryRequirements;
}
@@ -69472,20 +65105,46 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::MemoryRequirements2& memoryRequirements = structureChain.template get<vk::MemoryRequirements2>();
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const vk::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( pInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
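// Sketch for the opaque-capture-address pair added above: unlike
// getBufferAddress these return a plain uint64_t intended for capture/replay
// tooling. Assumes the bufferDeviceAddressCaptureReplay feature is enabled.
#include <cstdint>
#include <vulkan/vulkan.hpp>

uint64_t captureAddressExample( vk::Device device, vk::Buffer buffer )
{
  return device.getBufferOpaqueCaptureAddress( vk::BufferDeviceAddressInfo( buffer ) );
}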
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const vk::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
@@ -69502,15 +65161,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, vk::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::DescriptorSetLayoutSupport support;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
return support;
}
@@ -69518,22 +65177,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::DescriptorSetLayoutSupport& support = structureChain.template get<vk::DescriptorSetLayoutSupport>();
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
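// Sketch for getDescriptorSetLayoutSupport above: probes whether a layout
// could be created without actually creating it. `binding` is a hypothetical
// single binding supplied by the caller.
#include <vulkan/vulkan.hpp>

bool layoutSupportedExample( vk::Device device,
                             vk::DescriptorSetLayoutBinding const & binding )
{
  vk::DescriptorSetLayoutCreateInfo createInfo( {}, 1, &binding );
  return device.getDescriptorSetLayoutSupport( createInfo ).supported;
}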
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const vk::DescriptorSetLayoutCreateInfo* pCreateInfo, vk::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( pSupport ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::DescriptorSetLayoutSupport support;
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
return support;
}
@@ -69541,52 +65200,52 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::DescriptorSetLayoutSupport& support = structureChain.template get<vk::DescriptorSetLayoutSupport>();
+ VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport*>( &support ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, vk::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PeerMemoryFeatureFlags peerMemoryFeatures;
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
return peerMemoryFeatures;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
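// Sketch for the peer-memory query above; meaningful for device groups with
// more than one physical device. Heap 0 and device indices 0/1 are assumed
// placeholders.
#include <vulkan/vulkan.hpp>

bool canCopyToPeerExample( vk::Device device )
{
  vk::PeerMemoryFeatureFlags features =
      device.getGroupPeerMemoryFeatures( 0 /*heap*/, 0 /*local*/, 1 /*remote*/ );
  return static_cast<bool>( features & vk::PeerMemoryFeatureFlagBits::eCopyDst );
}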
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, vk::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( pPeerMemoryFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PeerMemoryFeatureFlags peerMemoryFeatures;
+ VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags*>( &peerMemoryFeatures ) );
return peerMemoryFeatures;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( vk::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( pDeviceGroupPresentCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const
{
- vk::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( &deviceGroupPresentCapabilities ) ) );
return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" );
}
@@ -69594,15 +65253,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, vk::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
{
- vk::DeviceGroupPresentModeFlagsKHR modes;
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModes2EXT" );
}
@@ -69610,36 +65269,62 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( vk::SurfaceKHR surface, vk::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( pModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( vk::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
- vk::DeviceGroupPresentModeFlagsKHR modes;
+ VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR*>( &modes ) ) );
return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
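// Sketch for getGroupSurfacePresentModesKHR above; with exceptions enabled
// the enhanced overload unwraps the flags directly. Assumes VK_KHR_swapchain
// and VK_KHR_device_group are enabled.
#include <vulkan/vulkan.hpp>

vk::DeviceGroupPresentModeFlagsKHR presentModesExample( vk::Device device,
                                                        vk::SurfaceKHR surface )
{
  return device.getGroupSurfacePresentModesKHR( surface );
}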
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getMemoryCommitment( vk::DeviceMemory memory, vk::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize*>( pCommittedMemoryInBytes ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::DeviceSize Device::getMemoryCommitment( vk::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::DeviceSize committedMemoryInBytes;
+ VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize*>( &committedMemoryInBytes ) );
return committedMemoryInBytes;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
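// Sketch for getMemoryCommitment above: only informative for memory from a
// vk::MemoryPropertyFlagBits::eLazilyAllocated heap, where the committed byte
// count can grow over time.
#include <vulkan/vulkan.hpp>

vk::DeviceSize committedBytesExample( vk::Device device, vk::DeviceMemory lazyMemory )
{
  return device.getMemoryCommitment( lazyMemory );
}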
template<typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( pInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( pInfo ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( &info ) );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
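// Companion sketch to the buffer variant further up: the memory form added
// above takes a DeviceMemoryOpaqueCaptureAddressInfo. Assumes the allocation
// used vk::MemoryAllocateFlagBits::eDeviceAddressCaptureReplay.
#include <cstdint>
#include <vulkan/vulkan.hpp>

uint64_t memoryCaptureAddressExample( vk::Device device, vk::DeviceMemory memory )
{
  return device.getMemoryOpaqueCaptureAddress(
      vk::DeviceMemoryOpaqueCaptureAddressInfo( memory ) );
}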
+ template<typename Dispatch>
VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return d.vkGetDeviceProcAddr( m_device, pName );
@@ -69653,30 +65338,30 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, vk::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::Queue queue;
+ VULKAN_HPP_NAMESPACE::Queue queue;
d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
return queue;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getQueue2( const vk::DeviceQueueInfo2* pQueueInfo, vk::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( pQueueInfo ), reinterpret_cast<VkQueue*>( pQueue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::Queue queue;
+ VULKAN_HPP_NAMESPACE::Queue queue;
d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( &queueInfo ), reinterpret_cast<VkQueue*>( &queue ) );
return queue;
}
@@ -69684,13 +65369,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getEventStatus( vk::Event event, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getEventStatus( vk::Event event, Dispatch const &d ) const
+ VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
@@ -69698,13 +65383,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const vk::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
{
int fd;
Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
@@ -69714,13 +65399,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getFenceStatus( vk::Fence fence, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getFenceStatus( vk::Fence fence, Dispatch const &d ) const
+ VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
@@ -69729,13 +65414,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const vk::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
{
HANDLE handle;
Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
@@ -69745,45 +65430,45 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( vk::Image image, vk::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( vk::Image image, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const
{
- vk::ImageDrmFormatModifierPropertiesEXT properties;
+ VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( &properties ) ) );
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageDrmFormatModifierPropertiesEXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( vk::Image image, vk::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::MemoryRequirements Device::getImageMemoryRequirements( vk::Image image, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::MemoryRequirements memoryRequirements;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
return memoryRequirements;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const vk::ImageMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::MemoryRequirements2 memoryRequirements;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
return memoryRequirements;
}
@@ -69791,22 +65476,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::MemoryRequirements2& memoryRequirements = structureChain.template get<vk::MemoryRequirements2>();
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const vk::ImageMemoryRequirementsInfo2* pInfo, vk::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( pInfo ), reinterpret_cast<VkMemoryRequirements2*>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::MemoryRequirements2 memoryRequirements;
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
return memoryRequirements;
}
@@ -69814,20 +65499,20 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::MemoryRequirements2& memoryRequirements = structureChain.template get<vk::MemoryRequirements2>();
+ VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( &info ), reinterpret_cast<VkMemoryRequirements2*>( &memoryRequirements ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( vk::Image image, uint32_t* pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( vk::Image image, Dispatch const &d ) const
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const
{
std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
@@ -69837,7 +65522,7 @@ namespace VULKAN_HPP_NAMESPACE
return sparseMemoryRequirements;
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( vk::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements( vectorAllocator );
uint32_t sparseMemoryRequirementCount;
@@ -69849,7 +65534,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const vk::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
}
@@ -69877,7 +65562,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const vk::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, vk::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2*>( pSparseMemoryRequirements ) );
}
@@ -69905,22 +65590,22 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( vk::Image image, const vk::ImageSubresource* pSubresource, vk::SubresourceLayout* pLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::SubresourceLayout Device::getImageSubresourceLayout( vk::Image image, const ImageSubresource & subresource, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::SubresourceLayout layout;
+ VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
return layout;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const vk::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX*>( pInfo ) );
}
@@ -69934,13 +65619,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const vk::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( pInfo ), pBuffer ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const
{
struct AHardwareBuffer* buffer;
Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( &info ), &buffer ) );
@@ -69950,13 +65635,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const vk::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
{
int fd;
Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR*>( &getFdInfo ), &fd ) );
@@ -69965,30 +65650,30 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, int fd, vk::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( pMemoryFdProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const
{
- vk::MemoryFdPropertiesKHR memoryFdProperties;
+ VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR*>( &memoryFdProperties ) ) );
return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, vk::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( pMemoryHostPointerProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const
{
- vk::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
+ VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( &memoryHostPointerProperties ) ) );
return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" );
}
@@ -69996,13 +65681,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const vk::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
{
HANDLE handle;
Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
@@ -70013,13 +65698,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( vk::DeviceMemory memory, vk::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( vk::DeviceMemory memory, vk::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const
{
HANDLE handle;
Result result = static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
@@ -70030,15 +65715,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, vk::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( pMemoryWin32HandleProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
{
- vk::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+ VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( &memoryWin32HandleProperties ) ) );
return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" );
}
@@ -70046,13 +65731,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, vk::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( pPresentationTimings ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings;
uint32_t presentationTimingCount;
@@ -70074,7 +65759,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( vk::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings( vectorAllocator );
uint32_t presentationTimingCount;
@@ -70098,28 +65783,28 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL parameter, vk::PerformanceValueINTEL* pValue, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL*>( pValue ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const
{
- vk::PerformanceValueINTEL value;
+ VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
Result result = static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL*>( &value ) ) );
return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getPerformanceParameterINTEL" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPipelineCacheData( vk::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( vk::PipelineCache pipelineCache, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> data;
size_t dataSize;
@@ -70141,7 +65826,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( vk::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> data( vectorAllocator );
size_t dataSize;
@@ -70165,7 +65850,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const vk::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, vk::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( pInternalRepresentations ) ) );
}
@@ -70217,7 +65902,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const vk::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, vk::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR*>( pPipelineInfo ), pExecutableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( pProperties ) ) );
}
@@ -70269,7 +65954,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const vk::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, vk::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR*>( pExecutableInfo ), pStatisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR*>( pStatistics ) ) );
}
@@ -70321,13 +66006,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getQueryPoolResults( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, vk::DeviceSize stride, vk::QueryResultFlags flags, Dispatch const &d ) const
+ VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
@@ -70335,13 +66020,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( vk::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename T, typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV( vk::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" );
@@ -70349,43 +66034,58 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( vk::SwapchainKHR swapchain, vk::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( pDisplayTimingProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( vk::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
- vk::RefreshCycleDurationGOOGLE displayTimingProperties;
+ VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( &displayTimingProperties ) ) );
return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( vk::RenderPass renderPass, vk::Extent2D* pGranularity, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::Extent2D Device::getRenderAreaGranularity( vk::RenderPass renderPass, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::Extent2D granularity;
+ VULKAN_HPP_NAMESPACE::Extent2D granularity;
d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
return granularity;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( vk::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
+ {
+ uint64_t value;
+ Result result = static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+ return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreCounterValue" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( vk::Semaphore semaphore, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const &d ) const
{
uint64_t value;
Result result = static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
@@ -70394,13 +66094,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const vk::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
{
int fd;
Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( &getFdInfo ), &fd ) );
@@ -70410,13 +66110,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const vk::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
{
HANDLE handle;
Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
@@ -70426,13 +66126,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> info;
size_t infoSize;
@@ -70454,7 +66154,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( vk::Pipeline pipeline, vk::ShaderStageFlagBits shaderStage, vk::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> info( vectorAllocator );
size_t infoSize;
@@ -70478,13 +66178,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( vk::SwapchainKHR swapchain, vk::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( vk::SwapchainKHR swapchain, vk::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const
{
uint64_t counterValue;
Result result = static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
@@ -70493,13 +66193,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( vk::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, vk::Image* pSwapchainImages, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( vk::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
std::vector<Image,Allocator> swapchainImages;
uint32_t swapchainImageCount;
@@ -70521,7 +66221,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( vk::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Image,Allocator> swapchainImages( vectorAllocator );
uint32_t swapchainImageCount;
@@ -70546,13 +66246,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( vk::SwapchainKHR swapchain, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( vk::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
@@ -70560,13 +66260,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> data;
size_t dataSize;
@@ -70588,7 +66288,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( vk::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<uint8_t,Allocator> data( vectorAllocator );
size_t dataSize;
@@ -70612,13 +66312,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const vk::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( pImportFenceFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR*>( &importFenceFdInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" );
@@ -70627,13 +66327,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const vk::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( pImportFenceWin32HandleInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( &importFenceWin32HandleInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" );
@@ -70642,13 +66342,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const vk::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( pImportSemaphoreFdInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( &importSemaphoreFdInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" );
@@ -70657,13 +66357,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const vk::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( pImportSemaphoreWin32HandleInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( &importSemaphoreWin32HandleInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" );
@@ -70672,13 +66372,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const vk::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( pInitializeInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( &initializeInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" );
@@ -70686,13 +66386,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const vk::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const vk::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" );
@@ -70700,13 +66400,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::mapMemory( vk::DeviceMemory memory, vk::DeviceSize offset, vk::DeviceSize size, vk::MemoryMapFlags flags, void** ppData, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void*>::type Device::mapMemory( vk::DeviceMemory memory, vk::DeviceSize offset, vk::DeviceSize size, vk::MemoryMapFlags flags, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void*>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const
{
void* pData;
Result result = static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData ) );
@@ -70715,13 +66415,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
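A note on the mapMemory hunk above: the enhanced-mode overload now spells its return type as typename ResultValueType<void*>::type, so with exceptions enabled the wrapper hands the mapped pointer back directly and failure surfaces as an exception. A minimal usage sketch, assuming a created vk::Device `device`, a host-visible vk::DeviceMemory allocation `memory`, and illustrative `src`/`size` arguments (none of these names come from the diff):

    // Sketch only; not part of the generated header. With exceptions enabled,
    // the enhanced-mode mapMemory returns void* and throws on failure.
    #include <cstring>
    #include <vulkan/vulkan.hpp>

    void uploadToHostVisible( vk::Device device, vk::DeviceMemory memory,
                              const void* src, vk::DeviceSize size )
    {
        void* mapped = device.mapMemory( memory, 0 /*offset*/, size, vk::MemoryMapFlags() );
        std::memcpy( mapped, src, static_cast<size_t>( size ) );
        device.unmapMemory( memory );   // plain void call, per the wrappers below
    }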
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::mergePipelineCaches( vk::PipelineCache dstCache, uint32_t srcCacheCount, const vk::PipelineCache* pSrcCaches, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergePipelineCaches( vk::PipelineCache dstCache, ArrayProxy<const vk::PipelineCache> srcCaches, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> srcCaches, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" );
@@ -70729,13 +66429,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( vk::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const vk::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT*>( pSrcCaches ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergeValidationCachesEXT( vk::ValidationCacheEXT dstCache, ArrayProxy<const vk::ValidationCacheEXT> srcCaches, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> srcCaches, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size() , reinterpret_cast<const VkValidationCacheEXT*>( srcCaches.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" );
@@ -70743,43 +66443,65 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::registerEventEXT( const vk::DeviceEventInfoEXT* pDeviceEventInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Fence fence;
+ VULKAN_HPP_NAMESPACE::Fence fence;
Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" );
}
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::Fence fence;
+ Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+
+ ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+ return createResultValue<Fence,Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXTUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( vk::DisplayKHR display, const vk::DisplayEventInfoEXT* pDisplayEventInfo, const vk::AllocationCallbacks* pAllocator, vk::Fence* pFence, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Fence>::type Device::registerDisplayEventEXT( vk::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Fence fence;
+ VULKAN_HPP_NAMESPACE::Fence fence;
Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" );
}
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ {
+ VULKAN_HPP_NAMESPACE::Fence fence;
+ Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+
+ ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+ return createResultValue<Fence,Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXTUnique", deleter );
+ }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
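The new registerEventEXTUnique and registerDisplayEventEXTUnique overloads above wrap the returned fence in a UniqueHandle whose ObjectDestroy deleter destroys it through the same device and dispatcher. A sketch of the display variant, assuming VK_EXT_display_control is enabled and that `display` and the dynamic dispatcher `dld` were obtained elsewhere (device-level EXT commands cannot be resolved through the static dispatcher):

    // Sketch only; blocks until the display's first-pixel-out event fires.
    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    void waitForFirstPixelOut( vk::Device device, vk::DisplayKHR display,
                               vk::DispatchLoaderDynamic const& dld )
    {
        vk::DisplayEventInfoEXT eventInfo( vk::DisplayEventTypeEXT::eFirstPixelOut );
        // The Unique overload added above: the fence destroys itself on scope exit.
        auto fence = device.registerDisplayEventEXTUnique( display, eventInfo, nullptr, dld );
        vk::Result r = device.waitForFences( *fence, VK_TRUE, UINT64_MAX, dld );
        (void)r;    // with UINT64_MAX this is eSuccess; error codes throw in enhanced mode
    }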
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::registerObjectsNVX( vk::ObjectTableNVX objectTable, uint32_t objectCount, const vk::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::registerObjectsNVX( vk::ObjectTableNVX objectTable, ArrayProxy<const vk::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() );
@@ -70797,13 +66519,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::releaseFullScreenExclusiveModeEXT( vk::SwapchainKHR swapchain, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" );
@@ -70813,13 +66535,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::releasePerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" );
@@ -70828,13 +66550,27 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::resetCommandPool( vk::CommandPool commandPool, vk::CommandPoolResetFlags flags, Dispatch const &d) const
+ VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkReleaseProfilingLockKHR( m_device );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkReleaseProfilingLockKHR( m_device );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
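releaseProfilingLockKHR (VK_KHR_performance_query) is emitted identically in both preprocessor branches because the underlying call returns void, so enhanced mode has nothing to add. A sketch of the intended pairing, assuming the extension is enabled and that its acquire counterpart, acquireProfilingLockKHR, exists in this same header revision (an assumption; it is not shown in this hunk); `dld` is a dynamic dispatcher since these are device-level extension commands:

    // Sketch only; brackets profiled work with the performance-query lock.
    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    void withProfilingLock( vk::Device device, vk::DispatchLoaderDynamic const& dld )
    {
        vk::AcquireProfilingLockInfoKHR lockInfo( {}, UINT64_MAX /*timeout*/ );
        device.acquireProfilingLockKHR( lockInfo, dld );   // assumed counterpart
        // ... submit work that samples performance counters ...
        device.releaseProfilingLockKHR( dld );             // wrapper shown above
    }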
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetCommandPool( vk::CommandPool commandPool, vk::CommandPoolResetFlags flags, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" );
@@ -70843,13 +66579,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::resetDescriptorPool( vk::DescriptorPool descriptorPool, vk::DescriptorPoolResetFlags flags, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetDescriptorPool( vk::DescriptorPool descriptorPool, vk::DescriptorPoolResetFlags flags, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" );
@@ -70858,13 +66594,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::resetEvent( vk::Event event, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetEvent( vk::Event event, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" );
@@ -70872,13 +66608,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const vk::Fence* pFences, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetFences( ArrayProxy<const vk::Fence> fences, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" );
@@ -70887,26 +66623,40 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ }
+#else
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
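resetQueryPool, added above, is the Vulkan 1.2 core promotion of vkResetQueryPoolEXT (VK_EXT_host_query_reset); both of its wrappers, like the EXT pair that follows, forward straight to the C entry point and are noexcept. A minimal sketch, assuming a 1.2 device with the hostQueryReset feature enabled and a vk::QueryPool `pool` holding `count` queries:

    // Sketch only; resets queries from the host, no command buffer required
    // (contrast with vkCmdResetQueryPool, which records into a command buffer).
    #include <vulkan/vulkan.hpp>

    void resetFromHost( vk::Device device, vk::QueryPool pool, uint32_t count )
    {
        device.resetQueryPool( pool, 0 /*firstQuery*/, count );
    }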
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( vk::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const vk::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( &nameInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" );
@@ -70914,13 +66664,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const vk::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( &tagInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" );
@@ -70929,13 +66679,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::setEvent( vk::Event event, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::setEvent( vk::Event event, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" );
@@ -70943,13 +66693,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const vk::SwapchainKHR* pSwapchains, const vk::HdrMetadataEXT* pMetadata, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR*>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT*>( pMetadata ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const vk::SwapchainKHR> swapchains, ArrayProxy<const vk::HdrMetadataEXT> metadata, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
@@ -70965,41 +66715,55 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( vk::SwapchainKHR swapChain, vk::Bool32 localDimmingEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( vk::SwapchainKHR swapChain, vk::Bool32 localDimmingEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const vk::SemaphoreSignalInfoKHR* pSignalInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo*>( pSignalInfo ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo*>( &signalInfo ) ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphore" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfoKHR*>( pSignalInfo ) ) );
+ return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo*>( pSignalInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::signalSemaphoreKHR( const SemaphoreSignalInfoKHR & signalInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const &d ) const
{
- Result result = static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfoKHR*>( &signalInfo ) ) );
+ Result result = static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo*>( &signalInfo ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::signalSemaphoreKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::trimCommandPool( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::trimCommandPool( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
@@ -71007,13 +66771,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( vk::CommandPool commandPool, vk::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
@@ -71035,26 +66799,26 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::unmapMemory( vk::DeviceMemory memory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::unmapMemory( vk::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( vk::ObjectTableNVX objectTable, uint32_t objectCount, const vk::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Device::unregisterObjectsNVX( vk::ObjectTableNVX objectTable, ArrayProxy<const vk::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() );
@@ -71071,13 +66835,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
}
@@ -71085,39 +66849,39 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( vk::DescriptorSet descriptorSet, vk::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const vk::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const vk::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const vk::WriteDescriptorSet> descriptorWrites, ArrayProxy<const vk::CopyDescriptorSet> descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const vk::Fence* pFences, vk::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const vk::Fence> fences, vk::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
+ VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
@@ -71125,30 +66889,44 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const vk::SemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfoKHR*>( pWaitInfo ), timeout ) );
+ return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( pWaitInfo ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfoKHR & waitInfo, uint64_t timeout, Dispatch const &d ) const
+ VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( &waitInfo ), timeout ) );
+ return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphores", { Result::eSuccess, Result::eTimeout } );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
- Result result = static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfoKHR*>( &waitInfo ), timeout ) );
+ return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( pWaitInfo ), timeout ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const &d ) const
+ {
+ Result result = static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo*>( &waitInfo ), timeout ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitSemaphoresKHR", { Result::eSuccess, Result::eTimeout } );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
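signalSemaphore and waitSemaphores, added above, are the core Vulkan 1.2 names for the timeline-semaphore entry points, which is also why the KHR overloads now take the core SemaphoreSignalInfo/SemaphoreWaitInfo structs instead of the retired *KHR aliases. A wait sketch, assuming `timeline` was created with vk::SemaphoreType::eTimeline on a 1.2 device; the enhanced wrapper deliberately returns Result because eTimeout is a legitimate outcome:

    // Sketch only; blocks until the timeline semaphore reaches `value` or times out.
    #include <cstdint>
    #include <vulkan/vulkan.hpp>

    bool waitForTimelineValue( vk::Device device, vk::Semaphore timeline,
                               uint64_t value, uint64_t timeoutNs )
    {
        vk::SemaphoreWaitInfo waitInfo;
        waitInfo.semaphoreCount = 1;
        waitInfo.pSemaphores    = &timeline;
        waitInfo.pValues        = &value;
        return device.waitSemaphores( waitInfo, timeoutNs ) == vk::Result::eSuccess;
    }

The signal side is symmetric: device.signalSemaphore( vk::SemaphoreSignalInfo( timeline, value ) ).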
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const vk::AndroidSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" );
}
@@ -71156,7 +66934,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71167,15 +66945,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const vk::DebugReportCallbackCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DebugReportCallbackEXT* pCallback, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DebugReportCallbackEXT callback;
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" );
}
@@ -71183,7 +66961,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DebugReportCallbackEXT callback;
+ VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71193,15 +66971,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const vk::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( pMessenger ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DebugUtilsMessengerEXT messenger;
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" );
}
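As with every EXT instance function in this file, createDebugUtilsMessengerEXT and its Unique variant only work when the Dispatch argument can resolve vkCreateDebugUtilsMessengerEXT, so a vk::DispatchLoaderDynamic is the usual companion. A sketch, assuming VK_EXT_debug_utils was enabled at instance creation and `callback` is a user-supplied PFN_vkDebugUtilsMessengerCallbackEXT:

    // Sketch only; the Unique variant attaches an ObjectDestroy deleter, so the
    // messenger is destroyed through the same instance and dynamic dispatcher.
    #include <vulkan/vulkan.hpp>

    vk::UniqueHandle<vk::DebugUtilsMessengerEXT, vk::DispatchLoaderDynamic>
    makeMessenger( vk::Instance instance, PFN_vkDebugUtilsMessengerCallbackEXT callback,
                   vk::DispatchLoaderDynamic const& dld )
    {
        vk::DebugUtilsMessengerCreateInfoEXT createInfo(
            {},
            vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
                vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
            vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
                vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
            callback );
        return instance.createDebugUtilsMessengerEXTUnique( createInfo, nullptr, dld );
    }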
@@ -71209,7 +66987,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DebugUtilsMessengerEXT messenger;
+ VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71219,15 +66997,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
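
Each handle type keeps three call tiers: the C-style pointer overload, the enhanced overload that folds the Result into `ResultValueType`, and a *Unique overload that pairs the handle with an `ObjectDestroy` deleter so it is destroyed on scope exit. A reduced, self-contained sketch of that ownership pattern (every *Sketch name is a stand-in for the generated vulkan.hpp machinery):

    #include <cstdio>

    struct OwnerSketch                  // stands in for Instance
    {
      void destroyChild( int handle ) const { std::printf( "destroyed %d\n", handle ); }
    };

    struct ObjectDestroySketch          // stands in for ObjectDestroy<Instance,Dispatch>
    {
      OwnerSketch owner;
      void destroy( int handle ) const { owner.destroyChild( handle ); }
    };

    class UniqueHandleSketch            // stands in for UniqueHandle<T,Dispatch>
    {
    public:
      UniqueHandleSketch( int handle, ObjectDestroySketch deleter )
        : m_handle( handle ), m_deleter( deleter ) {}
      UniqueHandleSketch( UniqueHandleSketch const & ) = delete;
      UniqueHandleSketch & operator=( UniqueHandleSketch const & ) = delete;
      ~UniqueHandleSketch() { if ( m_handle ) m_deleter.destroy( m_handle ); }
      int get() const { return m_handle; }
    private:
      int                 m_handle;
      ObjectDestroySketch m_deleter;
    };

    int main()
    {
      UniqueHandleSketch messenger( 42, ObjectDestroySketch{ OwnerSketch{} } );
      return 0;
    }   // prints "destroyed 42" as the handle leaves scope
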
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const vk::DisplaySurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" );
}
@@ -71235,7 +67013,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71245,15 +67023,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const vk::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXT" );
}
@@ -71261,7 +67039,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71272,15 +67050,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_IOS_MVK
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const vk::IOSSurfaceCreateInfoMVK* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" );
}
@@ -71288,7 +67066,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71300,15 +67078,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_FUCHSIA
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const vk::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIA" );
}
@@ -71316,7 +67094,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71328,15 +67106,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_MACOS_MVK
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const vk::MacOSSurfaceCreateInfoMVK* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" );
}
@@ -71344,7 +67122,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71356,15 +67134,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_METAL_EXT
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const vk::MetalSurfaceCreateInfoEXT* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXT" );
}
@@ -71372,7 +67150,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71384,15 +67162,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_GGP
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const vk::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGP" );
}
@@ -71400,7 +67178,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71412,15 +67190,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_VI_NN
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const vk::ViSurfaceCreateInfoNN* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" );
}
@@ -71428,7 +67206,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71440,15 +67218,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const vk::WaylandSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" );
}
@@ -71456,7 +67234,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71468,15 +67246,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const vk::Win32SurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" );
}
@@ -71484,7 +67262,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71496,15 +67274,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XCB_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const vk::XcbSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" );
}
@@ -71512,7 +67290,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71524,15 +67302,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const vk::XlibSurfaceCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::SurfaceKHR* pSurface, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" );
}
@@ -71540,7 +67318,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::SurfaceKHR surface;
+ VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
@@ -71551,13 +67329,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
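
Every window-system factory above exists only behind its VK_USE_PLATFORM_* guard, so portable callers select a factory with the same macros at compile time. A hedged sketch for a single-target build (assumes enhanced mode with exceptions enabled, where the create calls return the SurfaceKHR directly; createPlatformSurface is a hypothetical helper, not part of vulkan.hpp):

    #include <vulkan/vulkan.hpp>

    #if defined( VK_USE_PLATFORM_WIN32_KHR )
    vk::SurfaceKHR createPlatformSurface( vk::Instance instance, vk::Win32SurfaceCreateInfoKHR const & createInfo )
    {
      return instance.createWin32SurfaceKHR( createInfo );
    }
    #elif defined( VK_USE_PLATFORM_WAYLAND_KHR )
    vk::SurfaceKHR createPlatformSurface( vk::Instance instance, vk::WaylandSurfaceCreateInfoKHR const & createInfo )
    {
      return instance.createWaylandSurfaceKHR( createInfo );
    }
    #elif defined( VK_USE_PLATFORM_XCB_KHR )
    vk::SurfaceKHR createPlatformSurface( vk::Instance instance, vk::XcbSurfaceCreateInfoKHR const & createInfo )
    {
      return instance.createXcbSurfaceKHR( createInfo );
    }
    #endif
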
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( vk::DebugReportFlagsEXT flags, vk::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( vk::DebugReportFlagsEXT flags, vk::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const
+ VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const
{
#ifdef VULKAN_HPP_NO_EXCEPTIONS
VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() );
@@ -71572,59 +67350,59 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( vk::DebugReportCallbackEXT callback, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( vk::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( vk::DebugReportCallbackEXT callback, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( vk::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( vk::DebugUtilsMessengerEXT messenger, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( vk::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( vk::DebugUtilsMessengerEXT messenger, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( vk::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
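
Note the allocator plumbing shared by all the enhanced overloads: `Optional<const AllocationCallbacks>` accepts either a reference or `nullptr` and decays back to a raw pointer at the dispatch site, which is what the recurring `static_cast<const AllocationCallbacks*>( allocator )` invokes. A reduced stand-in for that wrapper:

    #include <cstddef>

    template <typename RefType>
    class OptionalSketch                // stands in for vk::Optional
    {
    public:
      OptionalSketch( RefType & reference ) : m_ptr( &reference ) {}
      OptionalSketch( std::nullptr_t ) : m_ptr( nullptr ) {}
      operator RefType *() const { return m_ptr; }  // the conversion the static_cast uses
    private:
      RefType * m_ptr;
    };

    int main()
    {
      int allocatorState = 7;
      OptionalSketch<int> some( allocatorState );
      OptionalSketch<int> none( nullptr );
      return ( static_cast<int *>( some ) != nullptr && static_cast<int *>( none ) == nullptr ) ? 0 : 1;
    }
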
@@ -71637,33 +67415,33 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( vk::SurfaceKHR surface, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( vk::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( vk::SurfaceKHR surface, const vk::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::destroy( vk::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, vk::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
}
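
enumeratePhysicalDeviceGroups follows Vulkan's two-call enumeration idiom: query the count with a null array, size the buffer, fill it, and loop while the implementation reports eIncomplete because the count changed between calls. The enhanced overloads elided from this hunk wrap exactly that loop; a generic, self-contained sketch of the idiom (Status and enumerateAll are illustrative names, not vulkan.hpp ones):

    #include <cstdint>
    #include <vector>

    enum class Status { Success, Incomplete };

    template <typename T, typename Enumerate>
    std::vector<T> enumerateAll( Enumerate enumerate )
    {
      uint32_t       count = 0;
      std::vector<T> items;
      Status         s;
      do
      {
        s = enumerate( &count, static_cast<T *>( nullptr ) );  // first call: count only
        if ( s == Status::Success && count )
        {
          items.resize( count );
          s = enumerate( &count, items.data() );               // second call: fill
        }
      } while ( s == Status::Incomplete );                     // count raced; retry
      items.resize( count );                                   // implementation may shrink it
      return items;
    }

    int main()
    {
      auto fake = []( uint32_t * count, int * out ) {
        *count = 3;
        if ( out ) { out[0] = 10; out[1] = 20; out[2] = 30; }
        return Status::Success;
      };
      return static_cast<int>( enumerateAll<int>( fake ).size() ) - 3;  // 0 on success
    }
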
@@ -71715,7 +67493,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, vk::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties*>( pPhysicalDeviceGroupProperties ) ) );
}
@@ -71767,7 +67545,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, vk::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
}
@@ -71832,13 +67610,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, vk::DebugUtilsMessageTypeFlagsEXT messageTypes, const vk::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( pCallbackData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, vk::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( &callbackData ) );
}
@@ -71846,13 +67624,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, vk::DisplayKHR display, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( vk::DisplayKHR display, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
{
Display dpy;
Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
@@ -71862,15 +67640,15 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const vk::DeviceCreateInfo* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::Device* pDevice, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Device device;
+ VULKAN_HPP_NAMESPACE::Device device;
Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" );
}
@@ -71878,7 +67656,7 @@ namespace VULKAN_HPP_NAMESPACE
template<typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Device,Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::Device device;
+ VULKAN_HPP_NAMESPACE::Device device;
Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
@@ -71888,22 +67666,22 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( vk::DisplayKHR display, const vk::DisplayModeCreateInfoKHR* pCreateInfo, const vk::AllocationCallbacks* pAllocator, vk::DisplayModeKHR* pMode, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( vk::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
{
- vk::DisplayModeKHR mode;
+ VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, vk::ExtensionProperties* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
}
@@ -71955,7 +67733,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, vk::LayerProperties* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
}
@@ -72007,13 +67785,65 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( vk::DisplayKHR display, uint32_t* pPropertyCount, vk::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast<VkPerformanceCounterKHR*>( pCounters ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( pCounterDescriptions ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Dispatch const &d ) const
+ {
+ std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions;
+    uint32_t counterCount = counters.size();
+ Result result;
+ do
+ {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
+ if ( ( result == Result::eSuccess ) && counterCount )
+ {
+ counterDescriptions.resize( counterCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( result == Result::eSuccess )
+ {
+ VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
+ counterDescriptions.resize( counterCount );
+ }
+ return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ }
+ template<typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> counters, Allocator const& vectorAllocator, Dispatch const &d ) const
+ {
+ std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions( vectorAllocator );
+    uint32_t counterCount = counters.size();
+ Result result;
+ do
+ {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
+ if ( ( result == Result::eSuccess ) && counterCount )
+ {
+ counterDescriptions.resize( counterCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( result == Result::eSuccess )
+ {
+ VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
+ counterDescriptions.resize( counterCount );
+ }
+ return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
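// Illustrative usage sketch added for clarity -- not part of the upstream
// patch. Assumes VK_KHR_performance_query is available, `physicalDevice` is a
// valid vk::PhysicalDevice, and the default exceptions configuration, in
// which the enhanced overload returns the description vector directly and
// throws vk::SystemError on failure. The capacity of 32 is an arbitrary
// choice for the example:
//
//   std::vector<vk::PerformanceCounterKHR> counters( 32 );
//   std::vector<vk::PerformanceCounterDescriptionKHR> descriptions =
//       physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( /*queueFamilyIndex*/ 0, counters );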
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( vk::DisplayKHR display, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
{
std::vector<DisplayModeProperties2KHR,Allocator> properties;
uint32_t propertyCount;
@@ -72035,7 +67865,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( vk::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayModeProperties2KHR,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -72059,13 +67889,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( vk::DisplayKHR display, uint32_t* pPropertyCount, vk::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( vk::DisplayKHR display, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
{
std::vector<DisplayModePropertiesKHR,Allocator> properties;
uint32_t propertyCount;
@@ -72087,7 +67917,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( vk::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<DisplayModePropertiesKHR,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -72111,37 +67941,37 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
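// Illustrative usage sketch added for clarity -- not part of the upstream
// patch. Assumes valid `physicalDevice` and `display` handles and the default
// exceptions configuration, in which the enhanced overload returns the vector
// directly:
//
//   std::vector<vk::DisplayModePropertiesKHR> modes = physicalDevice.getDisplayModePropertiesKHR( display );
//   for ( auto const & m : modes )
//   {
//     // m.parameters.visibleRegion and m.parameters.refreshRate describe each mode.
//   }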
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const vk::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, vk::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( pDisplayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const
{
- vk::DisplayPlaneCapabilities2KHR capabilities;
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( &capabilities ) ) );
return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( vk::DisplayModeKHR mode, uint32_t planeIndex, vk::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( vk::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const
{
- vk::DisplayPlaneCapabilitiesKHR capabilities;
+ VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, vk::DisplayKHR* pDisplays, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
}
@@ -72193,7 +68023,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, vk::TimeDomainEXT* pTimeDomains, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT*>( pTimeDomains ) ) );
}
@@ -72245,7 +68075,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, vk::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( pProperties ) ) );
}
@@ -72297,7 +68127,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, vk::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR*>( pProperties ) ) );
}
@@ -72349,7 +68179,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, vk::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
}
@@ -72401,7 +68231,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, vk::DisplayProperties2KHR* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR*>( pProperties ) ) );
}
@@ -72453,7 +68283,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, vk::DisplayPropertiesKHR* pProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
}
@@ -72505,135 +68335,135 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const vk::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, vk::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::ExternalBufferProperties externalBufferProperties;
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
return externalBufferProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const vk::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, vk::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( pExternalBufferProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::ExternalBufferProperties externalBufferProperties;
+ VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties*>( &externalBufferProperties ) );
return externalBufferProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
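// Illustrative usage sketch added for clarity -- not part of the upstream
// patch. The usage and handle-type choices below are assumptions for the
// example:
//
//   vk::PhysicalDeviceExternalBufferInfo info( {}, vk::BufferUsageFlagBits::eTransferSrc,
//                                              vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   vk::ExternalBufferProperties props = physicalDevice.getExternalBufferProperties( info );
//   // props.externalMemoryProperties.compatibleHandleTypes lists the handle
//   // types that can be exported together with eOpaqueFd for this usage.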
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const vk::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, vk::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::ExternalFenceProperties externalFenceProperties;
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
return externalFenceProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const vk::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, vk::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( pExternalFenceProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::ExternalFenceProperties externalFenceProperties;
+ VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties*>( &externalFenceProperties ) );
return externalFenceProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ExternalMemoryHandleTypeFlagsNV externalHandleType, vk::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const
{
- vk::ExternalImageFormatPropertiesNV externalImageFormatProperties;
+ VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const vk::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, vk::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::ExternalSemaphoreProperties externalSemaphoreProperties;
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
return externalSemaphoreProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const vk::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, vk::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( pExternalSemaphoreProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::ExternalSemaphoreProperties externalSemaphoreProperties;
+ VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties*>( &externalSemaphoreProperties ) );
return externalSemaphoreProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( vk::PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceFeatures features;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
return features;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( vk::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceFeatures2 features;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
return features;
}
@@ -72641,22 +68471,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::PhysicalDeviceFeatures2& features = structureChain.template get<vk::PhysicalDeviceFeatures2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
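// Illustrative usage sketch added for clarity -- not part of the upstream
// patch. The StructureChain overload wires up the pNext chain, so core and
// extension feature structs can be queried in one call; the choice of
// vk::PhysicalDevice16BitStorageFeatures here is just an example:
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDevice16BitStorageFeatures>();
//   bool storage16 = chain.get<vk::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess;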
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( vk::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceFeatures2 features;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
return features;
}
@@ -72664,113 +68494,113 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::PhysicalDeviceFeatures2& features = structureChain.template get<vk::PhysicalDeviceFeatures2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( vk::Format format, vk::FormatProperties* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::FormatProperties PhysicalDevice::getFormatProperties( vk::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::FormatProperties formatProperties;
+ VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
return formatProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( vk::Format format, vk::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::FormatProperties2 PhysicalDevice::getFormatProperties2( vk::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::FormatProperties2 formatProperties;
+ VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
return formatProperties;
}
template<typename X, typename Y, typename ...Z, typename Dispatch>
- VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( vk::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::FormatProperties2& formatProperties = structureChain.template get<vk::FormatProperties2>();
+ VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( vk::Format format, vk::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( vk::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::FormatProperties2 formatProperties;
+ VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
return formatProperties;
}
template<typename X, typename Y, typename ...Z, typename Dispatch>
- VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( vk::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::FormatProperties2& formatProperties = structureChain.template get<vk::FormatProperties2>();
+ VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( vk::DeviceGeneratedCommandsFeaturesNVX* pFeatures, vk::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::DeviceGeneratedCommandsLimitsNVX limits;
+ VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX limits;
d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
return limits;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, vk::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( vk::Format format, vk::ImageType type, vk::ImageTiling tiling, vk::ImageUsageFlags usage, vk::ImageCreateFlags flags, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const
{
- vk::ImageFormatProperties imageFormatProperties;
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
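// Illustrative usage sketch added for clarity -- not part of the upstream
// patch. Probes a format/usage combination; with exceptions enabled an
// unsupported combination surfaces as a thrown vk::SystemError carrying
// vk::Result::eErrorFormatNotSupported:
//
//   vk::ImageFormatProperties props = physicalDevice.getImageFormatProperties(
//       vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
//       vk::ImageUsageFlagBits::eSampled, {} );
//   // props.maxExtent and props.maxMipLevels bound what image creation accepts.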
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const vk::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, vk::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
{
- vk::ImageFormatProperties2 imageFormatProperties;
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
}
@@ -72778,22 +68608,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
{
StructureChain<X, Y, Z...> structureChain;
- vk::ImageFormatProperties2& imageFormatProperties = structureChain.template get<vk::ImageFormatProperties2>();
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const vk::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, vk::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
{
- vk::ImageFormatProperties2 imageFormatProperties;
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
}
@@ -72801,37 +68631,37 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
{
StructureChain<X, Y, Z...> structureChain;
- vk::ImageFormatProperties2& imageFormatProperties = structureChain.template get<vk::ImageFormatProperties2>();
+ VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( vk::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceMemoryProperties memoryProperties;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
return memoryProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( vk::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceMemoryProperties2 memoryProperties;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
return memoryProperties;
}
@@ -72839,22 +68669,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<vk::PhysicalDeviceMemoryProperties2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( vk::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceMemoryProperties2 memoryProperties;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
return memoryProperties;
}
@@ -72862,35 +68692,35 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<vk::PhysicalDeviceMemoryProperties2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( vk::SampleCountFlagBits samples, vk::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( pMultisampleProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( vk::SampleCountFlagBits samples, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::MultisamplePropertiesEXT multisampleProperties;
+ VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( &multisampleProperties ) );
return multisampleProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( vk::SurfaceKHR surface, uint32_t* pRectCount, vk::Rect2D* pRects, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( vk::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
std::vector<Rect2D,Allocator> rects;
uint32_t rectCount;
@@ -72912,7 +68742,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<Rect2D,Allocator> rects( vectorAllocator );
uint32_t rectCount;
@@ -72936,30 +68766,30 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getProperties( vk::PhysicalDeviceProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceProperties properties;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
return properties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( vk::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceProperties2 properties;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
return properties;
}
@@ -72967,22 +68797,22 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::PhysicalDeviceProperties2& properties = structureChain.template get<vk::PhysicalDeviceProperties2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( vk::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE vk::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
- vk::PhysicalDeviceProperties2 properties;
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
return properties;
}
@@ -72990,14 +68820,29 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
{
StructureChain<X, Y, Z...> structureChain;
- vk::PhysicalDeviceProperties2& properties = structureChain.template get<vk::PhysicalDeviceProperties2>();
+ VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
return structureChain;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( pPerformanceQueryCreateInfo ), pNumPasses );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+ {
+ uint32_t numPasses;
+ d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( &performanceQueryCreateInfo ), &numPasses );
+ return numPasses;
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
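  // Usage sketch (illustration only, not part of the generated header) for the
  // VK_KHR_performance_query wrapper added above. The enhanced-mode overload
  // returns how many vkQueueSubmit passes a query pool with the given counters
  // needs; `physicalDevice` and `counterIndices` are assumed to come from the
  // application (the latter e.g. via enumerateQueueFamilyPerformanceQueryCountersKHR).
  //
  //   vk::QueryPoolPerformanceCreateInfoKHR perfCreateInfo;
  //   perfCreateInfo.queueFamilyIndex  = 0;
  //   perfCreateInfo.counterIndexCount = static_cast<uint32_t>( counterIndices.size() );
  //   perfCreateInfo.pCounterIndices   = counterIndices.data();
  //   uint32_t numPasses = physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( perfCreateInfo );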
+
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
}
@@ -73025,7 +68870,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
}
@@ -73057,7 +68902,16 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+ {
+ localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ }
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+ {
+ queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+ }
return queueFamilyProperties;
}
template<typename StructureChain, typename Allocator, typename Dispatch>
@@ -73067,13 +68921,22 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+ {
+ localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ }
+ d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+ {
+ queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+ }
return queueFamilyProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, vk::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
}
@@ -73105,7 +68968,16 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+ {
+ localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ }
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+ {
+ queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+ }
return queueFamilyProperties;
}
template<typename StructureChain, typename Allocator, typename Dispatch>
@@ -73115,19 +68987,28 @@ namespace VULKAN_HPP_NAMESPACE
uint32_t queueFamilyPropertyCount;
d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
queueFamilyProperties.resize( queueFamilyPropertyCount );
- d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+ std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+ {
+ localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+ }
+ d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+ for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+ {
+ queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+ }
return queueFamilyProperties;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
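  // Note on the localVector copies introduced in the four StructureChain
  // overloads above: a std::vector of StructureChain objects is not
  // layout-compatible with the packed VkQueueFamilyProperties2 array the C
  // entry point fills, so the wrapper now stages the head structs in a
  // contiguous temporary (carrying over each chain's pNext pointer so chained
  // extension structs are still written), makes the call, and copies the
  // results back into the chains.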
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, uint32_t* pPropertyCount, vk::SparseImageFormatProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, Dispatch const &d ) const
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d ) const
{
std::vector<SparseImageFormatProperties,Allocator> properties;
uint32_t propertyCount;
@@ -73137,7 +69018,7 @@ namespace VULKAN_HPP_NAMESPACE
return properties;
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( vk::Format format, vk::ImageType type, vk::SampleCountFlagBits samples, vk::ImageUsageFlags usage, vk::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SparseImageFormatProperties,Allocator> properties( vectorAllocator );
uint32_t propertyCount;
@@ -73149,7 +69030,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const vk::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, vk::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
}
@@ -73177,7 +69058,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const vk::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, vk::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
}
@@ -73205,7 +69086,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, vk::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( pCombinations ) ) );
}
@@ -73257,30 +69138,30 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( vk::SurfaceKHR surface, vk::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( vk::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
- vk::SurfaceCapabilities2EXT surfaceCapabilities;
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, vk::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
{
- vk::SurfaceCapabilities2KHR surfaceCapabilities;
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
}
@@ -73288,29 +69169,29 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
{
StructureChain<X, Y, Z...> structureChain;
- vk::SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get<vk::SurfaceCapabilities2KHR>();
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
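  // Usage sketch (illustration only): querying extended surface capabilities
  // through the StructureChain overload above. VK_KHR_surface_protected_capabilities
  // is one structure known to chain off SurfaceCapabilities2KHR; `surface` is
  // assumed to be a valid vk::SurfaceKHR.
  //
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  //   auto chain = physicalDevice.getSurfaceCapabilities2KHR<
  //     vk::SurfaceCapabilities2KHR, vk::SurfaceProtectedCapabilitiesKHR>( surfaceInfo );
  //   vk::Bool32 protectedOk = chain.get<vk::SurfaceProtectedCapabilitiesKHR>().supportsProtected;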
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( vk::SurfaceKHR surface, vk::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( vk::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
- vk::SurfaceCapabilitiesKHR surfaceCapabilities;
+ VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, vk::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( pSurfaceFormats ) ) );
}
@@ -73362,13 +69243,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( vk::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, vk::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( vk::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
uint32_t surfaceFormatCount;
@@ -73390,7 +69271,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<SurfaceFormatKHR,Allocator> surfaceFormats( vectorAllocator );
uint32_t surfaceFormatCount;
@@ -73415,7 +69296,7 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_WIN32_KHR
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const vk::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, vk::PresentModeKHR* pPresentModes, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
}
@@ -73468,13 +69349,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( vk::SurfaceKHR surface, uint32_t* pPresentModeCount, vk::PresentModeKHR* pPresentModes, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( vk::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
std::vector<PresentModeKHR,Allocator> presentModes;
uint32_t presentModeCount;
@@ -73496,7 +69377,7 @@ namespace VULKAN_HPP_NAMESPACE
return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
}
template<typename Allocator, typename Dispatch>
- VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( vk::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
uint32_t presentModeCount;
@@ -73520,20 +69401,72 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, vk::SurfaceKHR surface, vk::Bool32* pSupported, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32*>( pSupported ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, vk::SurfaceKHR surface, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
{
- vk::Bool32 supported;
+ VULKAN_HPP_NAMESPACE::Bool32 supported;
Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32*>( &supported ) ) );
return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
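  // Usage sketch (illustration only): a typical swapchain pre-flight combines
  // the three surface queries wrapped above; `surface` and `graphicsQueueFamily`
  // are assumed to exist in the application.
  //
  //   auto formats      = physicalDevice.getSurfaceFormatsKHR( surface );
  //   auto presentModes = physicalDevice.getSurfacePresentModesKHR( surface );
  //   vk::Bool32 usable = physicalDevice.getSurfaceSupportKHR( graphicsQueueFamily, surface );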
+ template<typename Dispatch>
+ VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ {
+ return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( pToolProperties ) ) );
+ }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+ template<typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Dispatch const &d ) const
+ {
+ std::vector<PhysicalDeviceToolPropertiesEXT,Allocator> toolProperties;
+ uint32_t toolCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && toolCount )
+ {
+ toolProperties.resize( toolCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( toolProperties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( result == Result::eSuccess )
+ {
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ toolProperties.resize( toolCount );
+ }
+ return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
+ }
+ template<typename Allocator, typename Dispatch>
+ VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT,Allocator>>::type PhysicalDevice::getToolPropertiesEXT(Allocator const& vectorAllocator, Dispatch const &d ) const
+ {
+ std::vector<PhysicalDeviceToolPropertiesEXT,Allocator> toolProperties( vectorAllocator );
+ uint32_t toolCount;
+ Result result;
+ do
+ {
+ result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
+ if ( ( result == Result::eSuccess ) && toolCount )
+ {
+ toolProperties.resize( toolCount );
+ result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( toolProperties.data() ) ) );
+ }
+ } while ( result == Result::eIncomplete );
+ if ( result == Result::eSuccess )
+ {
+ VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+ toolProperties.resize( toolCount );
+ }
+ return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
+ }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
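  // Usage sketch (illustration only) for the VK_EXT_tooling_info wrappers added
  // above: the enhanced-mode overloads hide the usual count/resize/eIncomplete
  // retry loop and simply return the vector (printing assumes <iostream>).
  //
  //   auto tools = physicalDevice.getToolPropertiesEXT();
  //   for ( vk::PhysicalDeviceToolPropertiesEXT const & tool : tools )
  //   {
  //     std::cout << tool.name << " (" << tool.description << ")\n";
  //   }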
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
template<typename Dispatch>
VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
@@ -73597,15 +69530,15 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, vk::DisplayKHR* pDisplay, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<vk::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const
{
- vk::DisplayKHR display;
+ VULKAN_HPP_NAMESPACE::DisplayKHR display;
Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" );
}
@@ -73614,13 +69547,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( vk::DisplayKHR display, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( vk::DisplayKHR display, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" );
@@ -73628,7 +69561,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, vk::CheckpointDataNV* pCheckpointData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( pCheckpointData ) );
}
@@ -73656,7 +69589,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
}
@@ -73669,13 +69602,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const vk::BindSparseInfo* pBindInfo, vk::Fence fence, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const vk::BindSparseInfo> bindInfo, vk::Fence fence, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" );
@@ -73697,7 +69630,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const vk::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
}
@@ -73710,7 +69643,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::presentKHR( const vk::PresentInfoKHR* pPresentInfo, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
}
@@ -73725,13 +69658,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( vk::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" );
@@ -73739,13 +69672,13 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const vk::SubmitInfo* pSubmits, vk::Fence fence, Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Queue::submit( ArrayProxy<const vk::SubmitInfo> submits, vk::Fence fence, Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" );
@@ -73754,13 +69687,13 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template<typename Dispatch>
- VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const
+ VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
}
#else
template<typename Dispatch>
- VULKAN_HPP_INLINE ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
+ VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
{
Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" );
@@ -73773,13 +69706,14 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template <> struct isStructureChainValid<ImageFormatProperties2, AndroidHardwareBufferUsageANDROID>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
- template <> struct isStructureChainValid<AttachmentDescription2KHR, AttachmentDescriptionStencilLayoutKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<AttachmentReference2KHR, AttachmentReferenceStencilLayoutKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<AttachmentDescription2, AttachmentDescriptionStencilLayout>{ enum { value = true }; };
+ template <> struct isStructureChainValid<AttachmentReference2, AttachmentReferenceStencilLayout>{ enum { value = true }; };
template <> struct isStructureChainValid<BindBufferMemoryInfo, BindBufferMemoryDeviceGroupInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemoryDeviceGroupInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<BindImageMemoryInfo, BindImageMemorySwapchainInfoKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<BindImageMemoryInfo, BindImagePlaneMemoryInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<BufferCreateInfo, BufferDeviceAddressCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BufferCreateInfo, BufferOpaqueCaptureAddressCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<CommandBufferInheritanceInfo, CommandBufferInheritanceConditionalRenderingInfoEXT>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
template <> struct isStructureChainValid<SubmitInfo, D3D12FenceSubmitInfoKHR>{ enum { value = true }; };
@@ -73790,9 +69724,9 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<ImageCreateInfo, DedicatedAllocationImageCreateInfoNV>{ enum { value = true }; };
template <> struct isStructureChainValid<MemoryAllocateInfo, DedicatedAllocationMemoryAllocateInfoNV>{ enum { value = true }; };
template <> struct isStructureChainValid<DescriptorPoolCreateInfo, DescriptorPoolInlineUniformBlockCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DescriptorSetLayoutCreateInfo, DescriptorSetLayoutBindingFlagsCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DescriptorSetAllocateInfo, DescriptorSetVariableDescriptorCountAllocateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DescriptorSetLayoutSupport, DescriptorSetVariableDescriptorCountLayoutSupportEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DescriptorSetLayoutCreateInfo, DescriptorSetLayoutBindingFlagsCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DescriptorSetAllocateInfo, DescriptorSetVariableDescriptorCountAllocateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DescriptorSetLayoutSupport, DescriptorSetVariableDescriptorCountLayoutSupport>{ enum { value = true }; };
template <> struct isStructureChainValid<BindSparseInfo, DeviceGroupBindSparseInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<CommandBufferBeginInfo, DeviceGroupCommandBufferBeginInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, DeviceGroupDeviceCreateInfo>{ enum { value = true }; };
@@ -73830,15 +69764,15 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageCreateInfo, ExternalMemoryImageCreateInfoNV>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageFormatProperties2, FilterCubicImageViewImageFormatPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<FramebufferCreateInfo, FramebufferAttachmentsCreateInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<FramebufferCreateInfo, FramebufferAttachmentsCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageCreateInfo, ImageDrmFormatModifierExplicitCreateInfoEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageCreateInfo, ImageDrmFormatModifierListCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<SwapchainCreateInfoKHR, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageFormatListCreateInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageCreateInfo, ImageFormatListCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SwapchainCreateInfoKHR, ImageFormatListCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageFormatListCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageMemoryRequirementsInfo2, ImagePlaneMemoryRequirementsInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<ImageCreateInfo, ImageStencilUsageCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageStencilUsageCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<ImageCreateInfo, ImageStencilUsageCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, ImageStencilUsageCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageCreateInfo, ImageSwapchainCreateInfoKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewASTCDecodeModeEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageViewCreateInfo, ImageViewUsageCreateInfo>{ enum { value = true }; };
@@ -73856,16 +69790,20 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryAllocateFlagsInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryDedicatedAllocateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<MemoryRequirements2, MemoryDedicatedRequirements>{ enum { value = true }; };
+ template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryOpaqueCaptureAddressAllocateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<MemoryAllocateInfo, MemoryPriorityAllocateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SubmitInfo, PerformanceQuerySubmitInfoKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice16BitStorageFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice8BitStorageFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice8BitStorageFeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevice8BitStorageFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevice8BitStorageFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceASTCDecodeFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceASTCDecodeFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBlendOperationAdvancedFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceBlendOperationAdvancedPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBufferDeviceAddressFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBufferDeviceAddressFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceBufferDeviceAddressFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceBufferDeviceAddressFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceCoherentMemoryFeaturesAMD>{ enum { value = true }; };
@@ -73886,18 +69824,18 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDepthClipEnableFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDepthClipEnableFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDepthStencilResolvePropertiesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDescriptorIndexingFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDescriptorIndexingFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDescriptorIndexingPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDepthStencilResolveProperties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceDescriptorIndexingFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceDescriptorIndexingFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDescriptorIndexingProperties>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDiscardRectanglePropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDriverPropertiesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceDriverProperties>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceExclusiveScissorFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceExclusiveScissorFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceExternalImageFormatInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceExternalMemoryHostPropertiesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFeatures2>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceFloatControlsPropertiesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceFloatControlsProperties>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentDensityMapFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentDensityMapFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceFragmentDensityMapPropertiesEXT>{ enum { value = true }; };
@@ -73905,13 +69843,13 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentShaderBarycentricFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceFragmentShaderInterlockFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceFragmentShaderInterlockFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceHostQueryResetFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceHostQueryResetFeaturesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceHostQueryResetFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceHostQueryResetFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceIDProperties>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceImageDrmFormatModifierInfoEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceImageFormatInfo2, PhysicalDeviceImageViewImageFormatInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceImagelessFramebufferFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceImagelessFramebufferFeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceImagelessFramebufferFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceImagelessFramebufferFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceIndexTypeUint8FeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceIndexTypeUint8FeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceInlineUniformBlockFeaturesEXT>{ enum { value = true }; };
@@ -73932,6 +69870,9 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceMultiviewProperties>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePCIBusInfoPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevicePerformanceQueryFeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevicePerformanceQueryFeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePerformanceQueryPropertiesKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDevicePointClippingProperties>{ enum { value = true }; };
@@ -73943,15 +69884,15 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceRepresentativeFragmentTestFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceRepresentativeFragmentTestFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSampleLocationsPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSamplerFilterMinmaxPropertiesEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceSamplerFilterMinmaxProperties>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSamplerYcbcrConversionFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceScalarBlockLayoutFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceScalarBlockLayoutFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderAtomicInt64FeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderAtomicInt64FeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceScalarBlockLayoutFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceScalarBlockLayoutFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceSeparateDepthStencilLayoutsFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceSeparateDepthStencilLayoutsFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderAtomicInt64Features>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderAtomicInt64Features>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderClockFeaturesKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderClockFeaturesKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderCoreProperties2AMD>{ enum { value = true }; };
@@ -73960,8 +69901,8 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderDrawParametersFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderDrawParametersFeatures>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderFloat16Int8FeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderFloat16Int8FeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderFloat16Int8Features>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderFloat16Int8Features>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderImageFootprintFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderImageFootprintFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>{ enum { value = true }; };
@@ -73969,8 +69910,8 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderSMBuiltinsFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderSMBuiltinsFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShaderSMBuiltinsPropertiesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShaderSubgroupExtendedTypesFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShaderSubgroupExtendedTypesFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceShadingRateImageFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceShadingRateImageFeaturesNV>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceShadingRateImagePropertiesNV>{ enum { value = true }; };
@@ -73983,21 +69924,27 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTexelBufferAlignmentPropertiesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTimelineSemaphoreFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTimelineSemaphoreFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTimelineSemaphorePropertiesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTimelineSemaphoreFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTimelineSemaphoreFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTimelineSemaphoreProperties>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceTransformFeedbackFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceTransformFeedbackFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceTransformFeedbackPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceUniformBufferStandardLayoutFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceUniformBufferStandardLayoutFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVariablePointersFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVariablePointersFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVertexAttributeDivisorFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVertexAttributeDivisorFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVertexAttributeDivisorPropertiesEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkanMemoryModelFeaturesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkanMemoryModelFeaturesKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkan11Features>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkan11Features>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVulkan11Properties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkan12Features>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkan12Features>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceProperties2, PhysicalDeviceVulkan12Properties>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceVulkanMemoryModelFeatures>{ enum { value = true }; };
+ template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceVulkanMemoryModelFeatures>{ enum { value = true }; };
template <> struct isStructureChainValid<PhysicalDeviceFeatures2, PhysicalDeviceYcbcrImageArraysFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<DeviceCreateInfo, PhysicalDeviceYcbcrImageArraysFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<PipelineColorBlendStateCreateInfo, PipelineColorBlendAdvancedStateCreateInfoEXT>{ enum { value = true }; };
@@ -74031,23 +69978,24 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<PresentInfoKHR, PresentRegionsKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<PresentInfoKHR, PresentTimesInfoGOOGLE>{ enum { value = true }; };
template <> struct isStructureChainValid<SubmitInfo, ProtectedSubmitInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<QueryPoolCreateInfo, QueryPoolPerformanceCreateInfoKHR>{ enum { value = true }; };
template <> struct isStructureChainValid<QueueFamilyProperties2, QueueFamilyCheckpointPropertiesNV>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassAttachmentBeginInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassAttachmentBeginInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassFragmentDensityMapCreateInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<RenderPassCreateInfo2KHR, RenderPassFragmentDensityMapCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<RenderPassCreateInfo2, RenderPassFragmentDensityMapCreateInfoEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassInputAttachmentAspectCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<RenderPassCreateInfo, RenderPassMultiviewCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<RenderPassBeginInfo, RenderPassSampleLocationsBeginInfoEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageMemoryBarrier, SampleLocationsInfoEXT>{ enum { value = true }; };
- template <> struct isStructureChainValid<SamplerCreateInfo, SamplerReductionModeCreateInfoEXT>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SamplerCreateInfo, SamplerReductionModeCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageFormatProperties2, SamplerYcbcrConversionImageFormatProperties>{ enum { value = true }; };
template <> struct isStructureChainValid<SamplerCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageViewCreateInfo, SamplerYcbcrConversionInfo>{ enum { value = true }; };
- template <> struct isStructureChainValid<SemaphoreCreateInfo, SemaphoreTypeCreateInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<PhysicalDeviceExternalSemaphoreInfo, SemaphoreTypeCreateInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SemaphoreCreateInfo, SemaphoreTypeCreateInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<PhysicalDeviceExternalSemaphoreInfo, SemaphoreTypeCreateInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<ShaderModuleCreateInfo, ShaderModuleValidationCacheCreateInfoEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SharedPresentSurfaceCapabilitiesKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<SubpassDescription2KHR, SubpassDescriptionDepthStencilResolveKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SubpassDescription2, SubpassDescriptionDepthStencilResolve>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
template <> struct isStructureChainValid<SurfaceCapabilities2KHR, SurfaceCapabilitiesFullScreenExclusiveEXT>{ enum { value = true }; };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
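  // Illustrative sketch, not part of the patch: the isStructureChainValid
  // specializations above whitelist which structs may extend which through
  // pNext, so a vk::StructureChain over the newly promoted core aggregates
  // compiles only for the listed pairs. Assumes a valid vk::PhysicalDevice
  // obtained elsewhere.
  bool supportsTimelineSemaphores( vk::PhysicalDevice physicalDevice )
  {
    vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;
    physicalDevice.getFeatures2( &chain.get<vk::PhysicalDeviceFeatures2>() );
    return chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore == VK_TRUE;
  }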
@@ -74063,8 +70011,8 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SwapchainCounterCreateInfoEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<SwapchainCreateInfoKHR, SwapchainDisplayNativeHdrCreateInfoAMD>{ enum { value = true }; };
template <> struct isStructureChainValid<ImageFormatProperties2, TextureLODGatherFormatPropertiesAMD>{ enum { value = true }; };
- template <> struct isStructureChainValid<SubmitInfo, TimelineSemaphoreSubmitInfoKHR>{ enum { value = true }; };
- template <> struct isStructureChainValid<BindSparseInfo, TimelineSemaphoreSubmitInfoKHR>{ enum { value = true }; };
+ template <> struct isStructureChainValid<SubmitInfo, TimelineSemaphoreSubmitInfo>{ enum { value = true }; };
+ template <> struct isStructureChainValid<BindSparseInfo, TimelineSemaphoreSubmitInfo>{ enum { value = true }; };
template <> struct isStructureChainValid<InstanceCreateInfo, ValidationFeaturesEXT>{ enum { value = true }; };
template <> struct isStructureChainValid<InstanceCreateInfo, ValidationFlagsEXT>{ enum { value = true }; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
@@ -74076,10 +70024,6 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct isStructureChainValid<WriteDescriptorSet, WriteDescriptorSetAccelerationStructureNV>{ enum { value = true }; };
template <> struct isStructureChainValid<WriteDescriptorSet, WriteDescriptorSetInlineUniformBlockEXT>{ enum { value = true }; };
-#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL)
-# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
-#endif
-
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
class DynamicLoader
{
@@ -74095,7 +70039,7 @@ namespace VULKAN_HPP_NAMESPACE
#elif defined(__APPLE__)
m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
#elif defined(_WIN32)
- m_library = LoadLibrary( "vulkan-1.dll" );
+ m_library = LoadLibrary( TEXT( "vulkan-1.dll" ) );
#else
assert( false && "unsupported platform" );
#endif
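    // Illustrative sketch, not part of the patch: the constructor above
    // dlopen()s / LoadLibrary()s the platform's Vulkan loader, after which
    // getProcAddress<> bootstraps the one entry point everything else hangs
    // off. Null-handle checks are elided.
    PFN_vkGetInstanceProcAddr loadBootstrapEntryPoint()
    {
      static vk::DynamicLoader dl;  // static: the destructor would otherwise unload the library
      return dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
    }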
@@ -74159,6 +70103,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
+ PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
@@ -74188,10 +70133,12 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdDraw vkCmdDraw = 0;
PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
+ PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
+ PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
@@ -74202,12 +70149,14 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdEndQuery vkCmdEndQuery = 0;
PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
+ PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
+ PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0;
@@ -74255,6 +70204,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+ PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
PFN_vkAllocateMemory vkAllocateMemory = 0;
@@ -74288,6 +70238,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCreateQueryPool vkCreateQueryPool = 0;
PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
PFN_vkCreateRenderPass vkCreateRenderPass = 0;
+ PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
PFN_vkCreateSampler vkCreateSampler = 0;
PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
@@ -74338,10 +70289,14 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
+ PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
+ PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
+ PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
@@ -74353,6 +70308,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
@@ -74396,6 +70353,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+ PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
#ifdef VK_USE_PLATFORM_WIN32_KHR
@@ -74426,16 +70384,19 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+ PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
PFN_vkResetCommandPool vkResetCommandPool = 0;
PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
PFN_vkResetEvent vkResetEvent = 0;
PFN_vkResetFences vkResetFences = 0;
+ PFN_vkResetQueryPool vkResetQueryPool = 0;
PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
PFN_vkSetEvent vkSetEvent = 0;
PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+ PFN_vkSignalSemaphore vkSignalSemaphore = 0;
PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
PFN_vkTrimCommandPool vkTrimCommandPool = 0;
PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
@@ -74446,6 +70407,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
PFN_vkWaitForFences vkWaitForFences = 0;
+ PFN_vkWaitSemaphores vkWaitSemaphores = 0;
PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
@@ -74501,6 +70463,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+ PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
@@ -74537,6 +70500,7 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+ PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
@@ -74554,6 +70518,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+ PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
@@ -74585,15 +70550,18 @@ namespace VULKAN_HPP_NAMESPACE
#if !defined(VK_NO_PROTOTYPES)
// This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
- DispatchLoaderDynamic(vk::Instance const& instance, vk::Device const& device) VULKAN_HPP_NOEXCEPT
+ DispatchLoaderDynamic(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
{
init(instance, device);
}
// This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
- void init(vk::Instance const& instance, vk::Device const& device) VULKAN_HPP_NOEXCEPT
+ void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
{
- init(static_cast<VkInstance>(instance), ::vkGetInstanceProcAddr, static_cast<VkDevice>(device), device ? ::vkGetDeviceProcAddr : nullptr);
+ static vk::DynamicLoader dl;
+ PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
+ PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
+ init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
}
#endif // !defined(VK_NO_PROTOTYPES)
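    // Illustrative sketch, not part of the patch: the two-argument interface
    // above resolves device-local entry points in one step when the program
    // links the Vulkan loader. `instance` and `device` are assumed to be
    // valid handles created elsewhere.
    void buildPerDeviceDispatch( vk::Instance instance, vk::Device device )
    {
      vk::DispatchLoaderDynamic dispatch( instance, device );
      device.waitIdle( dispatch );  // any vulkan.hpp call accepts the dispatch table as its last argument
    }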
@@ -74624,14 +70592,15 @@ namespace VULKAN_HPP_NAMESPACE
{
VULKAN_HPP_ASSERT(instance && getInstanceProcAddr);
vkGetInstanceProcAddr = getInstanceProcAddr;
- init( vk::Instance(instance) );
+ init( VULKAN_HPP_NAMESPACE::Instance(instance) );
if (device) {
- init( vk::Device(device) );
+ init( VULKAN_HPP_NAMESPACE::Device(device) );
}
}
- void init( vk::Instance instance ) VULKAN_HPP_NOEXCEPT
+ void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
{
+ VkInstance instance = static_cast<VkInstance>(instanceCpp);
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
@@ -74685,6 +70654,7 @@ namespace VULKAN_HPP_NAMESPACE
vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+ vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
@@ -74721,6 +70691,7 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+ vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
@@ -74738,6 +70709,7 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+ vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
@@ -74760,6 +70732,7 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
+ vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
@@ -74789,10 +70762,12 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
+ vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
+ vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
@@ -74803,12 +70778,14 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
+ vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
+ vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdProcessCommandsNVX" ) );
@@ -74856,6 +70833,7 @@ namespace VULKAN_HPP_NAMESPACE
vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
+ vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
@@ -74889,6 +70867,7 @@ namespace VULKAN_HPP_NAMESPACE
vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
+ vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
@@ -74939,10 +70918,14 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
+ vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
+ vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
+ vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
@@ -74954,6 +70937,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
@@ -74997,6 +70982,7 @@ namespace VULKAN_HPP_NAMESPACE
vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
+ vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
@@ -75027,16 +71013,19 @@ namespace VULKAN_HPP_NAMESPACE
vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
+ vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
+ vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
+ vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
@@ -75047,6 +71036,7 @@ namespace VULKAN_HPP_NAMESPACE
vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
+ vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
@@ -75059,14 +71049,16 @@ namespace VULKAN_HPP_NAMESPACE
vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
}
- void init( vk::Device device ) VULKAN_HPP_NOEXCEPT
+ void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
{
+ VkDevice device = static_cast<VkDevice>(deviceCpp);
vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+ vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
@@ -75096,10 +71088,12 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+ vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+ vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
@@ -75110,12 +71104,14 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+ vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+ vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdProcessCommandsNVX" ) );
@@ -75163,6 +71159,7 @@ namespace VULKAN_HPP_NAMESPACE
vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+ vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
@@ -75196,6 +71193,7 @@ namespace VULKAN_HPP_NAMESPACE
vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+ vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
@@ -75246,10 +71244,14 @@ namespace VULKAN_HPP_NAMESPACE
#ifdef VK_USE_PLATFORM_ANDROID_KHR
vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+ vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+ vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+ vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+ vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
@@ -75261,6 +71263,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+ vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
@@ -75304,6 +71308,7 @@ namespace VULKAN_HPP_NAMESPACE
vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+ vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
@@ -75334,16 +71339,19 @@ namespace VULKAN_HPP_NAMESPACE
vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+ vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+ vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
+ vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
@@ -75354,6 +71362,7 @@ namespace VULKAN_HPP_NAMESPACE
vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+ vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
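  // Illustrative sketch, not part of the patch: many of the pointers added in
  // this file are Vulkan 1.2 core promotions of KHR entry points (timeline
  // semaphores, render pass 2, indirect count draws). Waiting on a timeline
  // semaphore through the freshly loaded table looks like this; `dispatch`,
  // `device`, and `semaphore` are assumed valid, on a Vulkan 1.2 device.
  VkResult waitTimelineValue( vk::DispatchLoaderDynamic const & dispatch,
                              VkDevice device, VkSemaphore semaphore, uint64_t value )
  {
    VkSemaphoreWaitInfo waitInfo = {};
    waitInfo.sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
    waitInfo.semaphoreCount = 1;
    waitInfo.pSemaphores    = &semaphore;
    waitInfo.pValues        = &value;
    return dispatch.vkWaitSemaphores( device, &waitInfo, UINT64_MAX );  // blocks until counter >= value
  }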
diff --git a/thirdparty/vulkan/include/vulkan/vulkan_core.h b/thirdparty/vulkan/include/vulkan/vulkan_core.h
index 5f7c485bfa..ea96fc43ed 100644
--- a/thirdparty/vulkan/include/vulkan/vulkan_core.h
+++ b/thirdparty/vulkan/include/vulkan/vulkan_core.h
@@ -44,7 +44,7 @@ extern "C" {
#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3ff)
#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xfff)
// Version of this file
-#define VK_HEADER_VERSION 127
+#define VK_HEADER_VERSION 131
#define VK_NULL_HANDLE 0
@@ -133,8 +133,11 @@ typedef enum VkResult {
VK_ERROR_TOO_MANY_OBJECTS = -10,
VK_ERROR_FORMAT_NOT_SUPPORTED = -11,
VK_ERROR_FRAGMENTED_POOL = -12,
+ VK_ERROR_UNKNOWN = -13,
VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000,
VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003,
+ VK_ERROR_FRAGMENTATION = -1000161000,
+ VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000,
VK_ERROR_SURFACE_LOST_KHR = -1000000000,
VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001,
VK_SUBOPTIMAL_KHR = 1000001003,
@@ -143,15 +146,16 @@ typedef enum VkResult {
VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
VK_ERROR_INVALID_SHADER_NV = -1000012000,
VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000,
- VK_ERROR_FRAGMENTATION_EXT = -1000161000,
VK_ERROR_NOT_PERMITTED_EXT = -1000174001,
- VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = -1000244000,
VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000,
VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY,
VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
- VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL,
+ VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION,
+ VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+ VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+ VK_RESULT_BEGIN_RANGE = VK_ERROR_UNKNOWN,
VK_RESULT_END_RANGE = VK_INCOMPLETE,
- VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1),
+ VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_UNKNOWN + 1),
VK_RESULT_MAX_ENUM = 0x7FFFFFFF
} VkResult;
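// Illustrative sketch, not part of the patch: the renames above keep the old
// EXT spellings as aliases of the new core values, so handlers must name each
// value once (listing both an alias and its core name would be a duplicate
// case label and would no longer compile).
const char* describeResult(VkResult r) {
    switch (r) {
    case VK_ERROR_UNKNOWN:                        return "unknown error";
    case VK_ERROR_FRAGMENTATION:                  return "fragmentation";       // also VK_ERROR_FRAGMENTATION_EXT
    case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: return "bad capture address"; // also VK_ERROR_INVALID_DEVICE_ADDRESS_EXT
    default:                                      return "other";
    }
}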
@@ -270,6 +274,56 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000,
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES = 49,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES = 50,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES = 51,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES = 52,
+ VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO = 1000147000,
+ VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2 = 1000109000,
+ VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 = 1000109001,
+ VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2 = 1000109002,
+ VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2 = 1000109003,
+ VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2 = 1000109004,
+ VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO = 1000109005,
+ VK_STRUCTURE_TYPE_SUBPASS_END_INFO = 1000109006,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES = 1000177000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES = 1000196000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES = 1000180000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES = 1000082000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES = 1000197000,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO = 1000161000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES = 1000161001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES = 1000161002,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO = 1000161003,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT = 1000161004,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES = 1000199000,
+ VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE = 1000199001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES = 1000221000,
+ VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO = 1000246000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES = 1000130000,
+ VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO = 1000130001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES = 1000211000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES = 1000108000,
+ VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO = 1000108001,
+ VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO = 1000108002,
+ VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO = 1000108003,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES = 1000253000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES = 1000175000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES = 1000241000,
+ VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT = 1000241001,
+ VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT = 1000241002,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES = 1000261000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES = 1000207000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES = 1000207001,
+ VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO = 1000207002,
+ VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO = 1000207003,
+ VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO = 1000207004,
+ VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO = 1000207005,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES = 1000257000,
+ VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO = 1000244001,
+ VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO = 1000257002,
+ VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO = 1000257003,
+ VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO = 1000257004,
VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000,
VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001,
VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007,
@@ -329,7 +383,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001,
VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = 1000082000,
VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000,
VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000,
VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001,
@@ -353,23 +406,19 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000,
VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001,
VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = 1000108000,
- VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = 1000108001,
- VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = 1000108002,
- VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = 1000108003,
- VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = 1000109000,
- VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = 1000109001,
- VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = 1000109002,
- VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = 1000109003,
- VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = 1000109004,
- VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = 1000109005,
- VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = 1000109006,
VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000,
VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000,
VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001,
VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002,
VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000,
VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001,
+ VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002,
+ VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003,
+ VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004,
+ VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005,
+ VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000,
VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001,
VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002,
@@ -391,8 +440,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003,
VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004,
VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000,
- VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = 1000138000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = 1000138001,
VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = 1000138002,
@@ -402,7 +449,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003,
VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004,
- VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = 1000147000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001,
VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002,
@@ -418,11 +464,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005,
VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000,
VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001,
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = 1000161000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = 1000161001,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = 1000161002,
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = 1000161003,
- VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = 1000161004,
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002,
@@ -443,12 +484,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000,
VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001,
VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = 1000174000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = 1000175000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = 1000177000,
VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000,
VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = 1000180000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000,
VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000,
VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000,
@@ -459,10 +497,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = 1000190002,
VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000,
VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000192000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = 1000196000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = 1000197000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = 1000199000,
- VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = 1000199001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001,
@@ -472,12 +506,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002,
VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000,
VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = 1000207000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = 1000207001,
- VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = 1000207002,
- VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = 1000207003,
- VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = 1000207004,
- VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = 1000207005,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000,
VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = 1000210000,
VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001,
@@ -485,7 +513,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003,
VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004,
VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = 1000211000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000,
VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000,
VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001,
@@ -494,7 +521,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001,
VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = 1000221000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = 1000225000,
VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = 1000225001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = 1000225002,
@@ -505,13 +531,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001,
VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = 1000241000,
- VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = 1000241001,
- VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = 1000241002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000,
- VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = 1000244001,
VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002,
- VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = 1000246000,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = 1000245000,
VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000,
VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001,
@@ -521,7 +543,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = 1000253000,
VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000,
VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002,
VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001,
@@ -529,7 +550,6 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000,
VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = 1000261000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000,
VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001,
@@ -575,10 +595,22 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
- VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
+ VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
+ VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
+ VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
+ VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
+ VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
+ VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
+ VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
+ VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
+ VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
+ VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
@@ -590,11 +622,14 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
+ VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+ VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
@@ -603,9 +638,41 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+ VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+ VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+ VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+ VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+ VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+ VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+ VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+ VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+ VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+ VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+ VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+ VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+ VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO,
VK_STRUCTURE_TYPE_END_RANGE = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1),
@@ -951,6 +1018,7 @@ typedef enum VkQueryType {
VK_QUERY_TYPE_PIPELINE_STATISTICS = 1,
VK_QUERY_TYPE_TIMESTAMP = 2,
VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004,
+ VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000,
VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000,
VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000,
VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION,
@@ -980,16 +1048,20 @@ typedef enum VkImageLayout {
VK_IMAGE_LAYOUT_PREINITIALIZED = 8,
VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000,
VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001,
+ VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000,
+ VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001,
+ VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002,
+ VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002,
VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = 1000164003,
VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
- VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = 1000241000,
- VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = 1000241001,
- VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = 1000241002,
- VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = 1000241003,
VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+ VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED,
VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1),
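// A hedged sketch of the promoted separate depth/stencil layouts in use:
// transitioning only the depth aspect into the new core layout. Assumes the
// separateDepthStencilLayouts feature is enabled; `cmd` and `image` are
// placeholders, and the function is illustrative, not part of the header.
static void transition_depth_only(VkCommandBuffer cmd, VkImage image) {
    VkImageMemoryBarrier barrier = {0};
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    barrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL; // core in 1.2, formerly _KHR
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = image;
    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; // depth aspect only
    barrier.subresourceRange.levelCount = 1;
    barrier.subresourceRange.layerCount = 1;
    vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                         VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
                         0, 0, NULL, 0, NULL, 1, &barrier);
}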
@@ -1437,11 +1509,12 @@ typedef enum VkFormatFeatureFlagBits {
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000,
VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000,
VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000,
+ VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000,
- VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000,
VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000,
VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+ VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
@@ -1649,7 +1722,9 @@ typedef enum VkBufferCreateFlagBits {
VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008,
- VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 0x00000010,
+ VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010,
+ VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+ VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkBufferCreateFlagBits;
typedef VkFlags VkBufferCreateFlags;
@@ -1664,11 +1739,13 @@ typedef enum VkBufferUsageFlagBits {
VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040,
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080,
VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100,
+ VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000,
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800,
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000,
VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200,
VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = 0x00000400,
- VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = 0x00020000,
+ VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+ VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkBufferUsageFlagBits;
typedef VkFlags VkBufferUsageFlags;
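// Illustrative only: creating a buffer whose device address can later be
// queried, using the core VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT added
// above. Assumes the bufferDeviceAddress feature was enabled on `device`;
// the helper name is hypothetical.
static VkResult make_addressable_buffer(VkDevice device, VkDeviceSize size, VkBuffer* out) {
    VkBufferCreateInfo info = {0};
    info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    info.size = size;
    info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
    info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    return vkCreateBuffer(device, &info, NULL, out);
}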
@@ -1691,10 +1768,11 @@ typedef enum VkPipelineCreateFlagBits {
VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002,
VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004,
VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008,
- VK_PIPELINE_CREATE_DISPATCH_BASE = 0x00000010,
+ VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010,
VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020,
VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040,
VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080,
+ VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE,
VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
@@ -1765,22 +1843,25 @@ typedef enum VkSamplerCreateFlagBits {
typedef VkFlags VkSamplerCreateFlags;
typedef enum VkDescriptorSetLayoutCreateFlagBits {
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
- VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = 0x00000002,
+ VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkDescriptorSetLayoutCreateFlagBits;
typedef VkFlags VkDescriptorSetLayoutCreateFlags;
typedef enum VkDescriptorPoolCreateFlagBits {
VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001,
- VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = 0x00000002,
+ VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002,
+ VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkDescriptorPoolCreateFlagBits;
typedef VkFlags VkDescriptorPoolCreateFlags;
typedef VkFlags VkDescriptorPoolResetFlags;
typedef enum VkFramebufferCreateFlagBits {
- VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = 0x00000001,
+ VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001,
+ VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkFramebufferCreateFlagBits;
typedef VkFlags VkFramebufferCreateFlags;
@@ -4051,7 +4132,11 @@ typedef VkFlags VkPeerMemoryFeatureFlags;
typedef enum VkMemoryAllocateFlagBits {
VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001,
+ VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT = 0x00000002,
+ VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000004,
VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+ VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
+ VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkMemoryAllocateFlagBits;
typedef VkFlags VkMemoryAllocateFlags;
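// A sketch of the new allocate flags in use: VkMemoryAllocateFlagsInfo (core
// since 1.1) chained into VkMemoryAllocateInfo so the allocation can back an
// addressable buffer. Memory-type selection is elided; names are illustrative.
static VkResult alloc_addressable(VkDevice device, VkDeviceSize size,
                                  uint32_t memoryTypeIndex, VkDeviceMemory* out) {
    VkMemoryAllocateFlagsInfo flags = {0};
    flags.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO;
    flags.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT; // core in 1.2, formerly _KHR

    VkMemoryAllocateInfo info = {0};
    info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    info.pNext = &flags;
    info.allocationSize = size;
    info.memoryTypeIndex = memoryTypeIndex;
    return vkAllocateMemory(device, &info, NULL, out);
}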
@@ -4818,6 +4903,760 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(
#endif
+#define VK_VERSION_1_2 1
+// Vulkan 1.2 version number
+#define VK_API_VERSION_1_2 VK_MAKE_VERSION(1, 2, 0) // Patch version should always be set to 0
+
+typedef uint64_t VkDeviceAddress;
+#define VK_MAX_DRIVER_NAME_SIZE 256
+#define VK_MAX_DRIVER_INFO_SIZE 256
+
+typedef enum VkDriverId {
+ VK_DRIVER_ID_AMD_PROPRIETARY = 1,
+ VK_DRIVER_ID_AMD_OPEN_SOURCE = 2,
+ VK_DRIVER_ID_MESA_RADV = 3,
+ VK_DRIVER_ID_NVIDIA_PROPRIETARY = 4,
+ VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS = 5,
+ VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA = 6,
+ VK_DRIVER_ID_IMAGINATION_PROPRIETARY = 7,
+ VK_DRIVER_ID_QUALCOMM_PROPRIETARY = 8,
+ VK_DRIVER_ID_ARM_PROPRIETARY = 9,
+ VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10,
+ VK_DRIVER_ID_GGP_PROPRIETARY = 11,
+ VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12,
+ VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY,
+ VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE,
+ VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV,
+ VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
+ VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
+ VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
+ VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
+ VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
+ VK_DRIVER_ID_ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY,
+ VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
+ VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY,
+ VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+ VK_DRIVER_ID_BEGIN_RANGE = VK_DRIVER_ID_AMD_PROPRIETARY,
+ VK_DRIVER_ID_END_RANGE = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+ VK_DRIVER_ID_RANGE_SIZE = (VK_DRIVER_ID_BROADCOM_PROPRIETARY - VK_DRIVER_ID_AMD_PROPRIETARY + 1),
+ VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF
+} VkDriverId;
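
// Illustrative query of the VkDriverId values above through the aggregate 1.2
// properties struct declared further down. Assumes a 1.2-capable
// `physicalDevice`; the helper name is hypothetical.
static VkDriverId query_driver_id(VkPhysicalDevice physicalDevice) {
    VkPhysicalDeviceVulkan12Properties props12 = {0};
    props12.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES;

    VkPhysicalDeviceProperties2 props = {0};
    props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    props.pNext = &props12;
    vkGetPhysicalDeviceProperties2(physicalDevice, &props);
    return props12.driverID;
}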
+
+typedef enum VkShaderFloatControlsIndependence {
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY = 0,
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL = 1,
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE = 2,
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE,
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_BEGIN_RANGE = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_END_RANGE = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE,
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_RANGE_SIZE = (VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY + 1),
+ VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF
+} VkShaderFloatControlsIndependence;
+
+typedef enum VkSamplerReductionMode {
+ VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0,
+ VK_SAMPLER_REDUCTION_MODE_MIN = 1,
+ VK_SAMPLER_REDUCTION_MODE_MAX = 2,
+ VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+ VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN,
+ VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX,
+ VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+ VK_SAMPLER_REDUCTION_MODE_END_RANGE = VK_SAMPLER_REDUCTION_MODE_MAX,
+ VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE = (VK_SAMPLER_REDUCTION_MODE_MAX - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE + 1),
+ VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerReductionMode;
+
+typedef enum VkSemaphoreType {
+ VK_SEMAPHORE_TYPE_BINARY = 0,
+ VK_SEMAPHORE_TYPE_TIMELINE = 1,
+ VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY,
+ VK_SEMAPHORE_TYPE_TIMELINE_KHR = VK_SEMAPHORE_TYPE_TIMELINE,
+ VK_SEMAPHORE_TYPE_BEGIN_RANGE = VK_SEMAPHORE_TYPE_BINARY,
+ VK_SEMAPHORE_TYPE_END_RANGE = VK_SEMAPHORE_TYPE_TIMELINE,
+ VK_SEMAPHORE_TYPE_RANGE_SIZE = (VK_SEMAPHORE_TYPE_TIMELINE - VK_SEMAPHORE_TYPE_BINARY + 1),
+ VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreType;
+
+typedef enum VkResolveModeFlagBits {
+ VK_RESOLVE_MODE_NONE = 0,
+ VK_RESOLVE_MODE_SAMPLE_ZERO_BIT = 0x00000001,
+ VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002,
+ VK_RESOLVE_MODE_MIN_BIT = 0x00000004,
+ VK_RESOLVE_MODE_MAX_BIT = 0x00000008,
+ VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE,
+ VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
+ VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT,
+ VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT,
+ VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT,
+ VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkResolveModeFlagBits;
+typedef VkFlags VkResolveModeFlags;
+
+typedef enum VkDescriptorBindingFlagBits {
+ VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT = 0x00000001,
+ VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT = 0x00000002,
+ VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT = 0x00000004,
+ VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT = 0x00000008,
+ VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
+ VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
+ VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
+ VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT,
+ VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorBindingFlagBits;
+typedef VkFlags VkDescriptorBindingFlags;
+
+typedef enum VkSemaphoreWaitFlagBits {
+ VK_SEMAPHORE_WAIT_ANY_BIT = 0x00000001,
+ VK_SEMAPHORE_WAIT_ANY_BIT_KHR = VK_SEMAPHORE_WAIT_ANY_BIT,
+ VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreWaitFlagBits;
+typedef VkFlags VkSemaphoreWaitFlags;
+typedef struct VkPhysicalDeviceVulkan11Features {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 storageBuffer16BitAccess;
+ VkBool32 uniformAndStorageBuffer16BitAccess;
+ VkBool32 storagePushConstant16;
+ VkBool32 storageInputOutput16;
+ VkBool32 multiview;
+ VkBool32 multiviewGeometryShader;
+ VkBool32 multiviewTessellationShader;
+ VkBool32 variablePointersStorageBuffer;
+ VkBool32 variablePointers;
+ VkBool32 protectedMemory;
+ VkBool32 samplerYcbcrConversion;
+ VkBool32 shaderDrawParameters;
+} VkPhysicalDeviceVulkan11Features;
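
// A minimal sketch: chaining the new aggregate struct into
// vkGetPhysicalDeviceFeatures2 reads every 1.1 feature bit in one call,
// instead of chaining the individual per-extension structs it subsumes.
static VkBool32 has_shader_draw_parameters(VkPhysicalDevice physicalDevice) {
    VkPhysicalDeviceVulkan11Features feat11 = {0};
    feat11.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;

    VkPhysicalDeviceFeatures2 feat = {0};
    feat.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    feat.pNext = &feat11;
    vkGetPhysicalDeviceFeatures2(physicalDevice, &feat);
    return feat11.shaderDrawParameters;
}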
+
+typedef struct VkPhysicalDeviceVulkan11Properties {
+ VkStructureType sType;
+ void* pNext;
+ uint8_t deviceUUID[VK_UUID_SIZE];
+ uint8_t driverUUID[VK_UUID_SIZE];
+ uint8_t deviceLUID[VK_LUID_SIZE];
+ uint32_t deviceNodeMask;
+ VkBool32 deviceLUIDValid;
+ uint32_t subgroupSize;
+ VkShaderStageFlags subgroupSupportedStages;
+ VkSubgroupFeatureFlags subgroupSupportedOperations;
+ VkBool32 subgroupQuadOperationsInAllStages;
+ VkPointClippingBehavior pointClippingBehavior;
+ uint32_t maxMultiviewViewCount;
+ uint32_t maxMultiviewInstanceIndex;
+ VkBool32 protectedNoFault;
+ uint32_t maxPerSetDescriptors;
+ VkDeviceSize maxMemoryAllocationSize;
+} VkPhysicalDeviceVulkan11Properties;
+
+typedef struct VkPhysicalDeviceVulkan12Features {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 samplerMirrorClampToEdge;
+ VkBool32 drawIndirectCount;
+ VkBool32 storageBuffer8BitAccess;
+ VkBool32 uniformAndStorageBuffer8BitAccess;
+ VkBool32 storagePushConstant8;
+ VkBool32 shaderBufferInt64Atomics;
+ VkBool32 shaderSharedInt64Atomics;
+ VkBool32 shaderFloat16;
+ VkBool32 shaderInt8;
+ VkBool32 descriptorIndexing;
+ VkBool32 shaderInputAttachmentArrayDynamicIndexing;
+ VkBool32 shaderUniformTexelBufferArrayDynamicIndexing;
+ VkBool32 shaderStorageTexelBufferArrayDynamicIndexing;
+ VkBool32 shaderUniformBufferArrayNonUniformIndexing;
+ VkBool32 shaderSampledImageArrayNonUniformIndexing;
+ VkBool32 shaderStorageBufferArrayNonUniformIndexing;
+ VkBool32 shaderStorageImageArrayNonUniformIndexing;
+ VkBool32 shaderInputAttachmentArrayNonUniformIndexing;
+ VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing;
+ VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing;
+ VkBool32 descriptorBindingUniformBufferUpdateAfterBind;
+ VkBool32 descriptorBindingSampledImageUpdateAfterBind;
+ VkBool32 descriptorBindingStorageImageUpdateAfterBind;
+ VkBool32 descriptorBindingStorageBufferUpdateAfterBind;
+ VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
+ VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
+ VkBool32 descriptorBindingUpdateUnusedWhilePending;
+ VkBool32 descriptorBindingPartiallyBound;
+ VkBool32 descriptorBindingVariableDescriptorCount;
+ VkBool32 runtimeDescriptorArray;
+ VkBool32 samplerFilterMinmax;
+ VkBool32 scalarBlockLayout;
+ VkBool32 imagelessFramebuffer;
+ VkBool32 uniformBufferStandardLayout;
+ VkBool32 shaderSubgroupExtendedTypes;
+ VkBool32 separateDepthStencilLayouts;
+ VkBool32 hostQueryReset;
+ VkBool32 timelineSemaphore;
+ VkBool32 bufferDeviceAddress;
+ VkBool32 bufferDeviceAddressCaptureReplay;
+ VkBool32 bufferDeviceAddressMultiDevice;
+ VkBool32 vulkanMemoryModel;
+ VkBool32 vulkanMemoryModelDeviceScope;
+ VkBool32 vulkanMemoryModelAvailabilityVisibilityChains;
+ VkBool32 shaderOutputViewportIndex;
+ VkBool32 shaderOutputLayer;
+ VkBool32 subgroupBroadcastDynamicId;
+} VkPhysicalDeviceVulkan12Features;
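
// The aggregate struct doubles as the enable mechanism: chained into
// VkDeviceCreateInfo it replaces a pile of per-extension feature structs.
// A hedged sketch; the two features switched on here are examples only.
static VkResult create_device_with_12_features(VkPhysicalDevice gpu,
                                               const VkDeviceQueueCreateInfo* queueInfo,
                                               VkDevice* out) {
    VkPhysicalDeviceVulkan12Features feat12 = {0};
    feat12.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
    feat12.timelineSemaphore = VK_TRUE;    // example picks, not requirements
    feat12.bufferDeviceAddress = VK_TRUE;

    VkDeviceCreateInfo info = {0};
    info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    info.pNext = &feat12;
    info.queueCreateInfoCount = 1;
    info.pQueueCreateInfos = queueInfo;
    return vkCreateDevice(gpu, &info, NULL, out);
}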
+
+typedef struct VkConformanceVersion {
+ uint8_t major;
+ uint8_t minor;
+ uint8_t subminor;
+ uint8_t patch;
+} VkConformanceVersion;
+
+typedef struct VkPhysicalDeviceVulkan12Properties {
+ VkStructureType sType;
+ void* pNext;
+ VkDriverId driverID;
+ char driverName[VK_MAX_DRIVER_NAME_SIZE];
+ char driverInfo[VK_MAX_DRIVER_INFO_SIZE];
+ VkConformanceVersion conformanceVersion;
+ VkShaderFloatControlsIndependence denormBehaviorIndependence;
+ VkShaderFloatControlsIndependence roundingModeIndependence;
+ VkBool32 shaderSignedZeroInfNanPreserveFloat16;
+ VkBool32 shaderSignedZeroInfNanPreserveFloat32;
+ VkBool32 shaderSignedZeroInfNanPreserveFloat64;
+ VkBool32 shaderDenormPreserveFloat16;
+ VkBool32 shaderDenormPreserveFloat32;
+ VkBool32 shaderDenormPreserveFloat64;
+ VkBool32 shaderDenormFlushToZeroFloat16;
+ VkBool32 shaderDenormFlushToZeroFloat32;
+ VkBool32 shaderDenormFlushToZeroFloat64;
+ VkBool32 shaderRoundingModeRTEFloat16;
+ VkBool32 shaderRoundingModeRTEFloat32;
+ VkBool32 shaderRoundingModeRTEFloat64;
+ VkBool32 shaderRoundingModeRTZFloat16;
+ VkBool32 shaderRoundingModeRTZFloat32;
+ VkBool32 shaderRoundingModeRTZFloat64;
+ uint32_t maxUpdateAfterBindDescriptorsInAllPools;
+ VkBool32 shaderUniformBufferArrayNonUniformIndexingNative;
+ VkBool32 shaderSampledImageArrayNonUniformIndexingNative;
+ VkBool32 shaderStorageBufferArrayNonUniformIndexingNative;
+ VkBool32 shaderStorageImageArrayNonUniformIndexingNative;
+ VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative;
+ VkBool32 robustBufferAccessUpdateAfterBind;
+ VkBool32 quadDivergentImplicitLod;
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
+ uint32_t maxPerStageUpdateAfterBindResources;
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers;
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
+ VkResolveModeFlags supportedDepthResolveModes;
+ VkResolveModeFlags supportedStencilResolveModes;
+ VkBool32 independentResolveNone;
+ VkBool32 independentResolve;
+ VkBool32 filterMinmaxSingleComponentFormats;
+ VkBool32 filterMinmaxImageComponentMapping;
+ uint64_t maxTimelineSemaphoreValueDifference;
+ VkSampleCountFlags framebufferIntegerColorSampleCounts;
+} VkPhysicalDeviceVulkan12Properties;
+
+typedef struct VkImageFormatListCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t viewFormatCount;
+ const VkFormat* pViewFormats;
+} VkImageFormatListCreateInfo;
+
+typedef struct VkAttachmentDescription2 {
+ VkStructureType sType;
+ const void* pNext;
+ VkAttachmentDescriptionFlags flags;
+ VkFormat format;
+ VkSampleCountFlagBits samples;
+ VkAttachmentLoadOp loadOp;
+ VkAttachmentStoreOp storeOp;
+ VkAttachmentLoadOp stencilLoadOp;
+ VkAttachmentStoreOp stencilStoreOp;
+ VkImageLayout initialLayout;
+ VkImageLayout finalLayout;
+} VkAttachmentDescription2;
+
+typedef struct VkAttachmentReference2 {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t attachment;
+ VkImageLayout layout;
+ VkImageAspectFlags aspectMask;
+} VkAttachmentReference2;
+
+typedef struct VkSubpassDescription2 {
+ VkStructureType sType;
+ const void* pNext;
+ VkSubpassDescriptionFlags flags;
+ VkPipelineBindPoint pipelineBindPoint;
+ uint32_t viewMask;
+ uint32_t inputAttachmentCount;
+ const VkAttachmentReference2* pInputAttachments;
+ uint32_t colorAttachmentCount;
+ const VkAttachmentReference2* pColorAttachments;
+ const VkAttachmentReference2* pResolveAttachments;
+ const VkAttachmentReference2* pDepthStencilAttachment;
+ uint32_t preserveAttachmentCount;
+ const uint32_t* pPreserveAttachments;
+} VkSubpassDescription2;
+
+typedef struct VkSubpassDependency2 {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t srcSubpass;
+ uint32_t dstSubpass;
+ VkPipelineStageFlags srcStageMask;
+ VkPipelineStageFlags dstStageMask;
+ VkAccessFlags srcAccessMask;
+ VkAccessFlags dstAccessMask;
+ VkDependencyFlags dependencyFlags;
+ int32_t viewOffset;
+} VkSubpassDependency2;
+
+typedef struct VkRenderPassCreateInfo2 {
+ VkStructureType sType;
+ const void* pNext;
+ VkRenderPassCreateFlags flags;
+ uint32_t attachmentCount;
+ const VkAttachmentDescription2* pAttachments;
+ uint32_t subpassCount;
+ const VkSubpassDescription2* pSubpasses;
+ uint32_t dependencyCount;
+ const VkSubpassDependency2* pDependencies;
+ uint32_t correlatedViewMaskCount;
+ const uint32_t* pCorrelatedViewMasks;
+} VkRenderPassCreateInfo2;
+
+typedef struct VkSubpassBeginInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkSubpassContents contents;
+} VkSubpassBeginInfo;
+
+typedef struct VkSubpassEndInfo {
+ VkStructureType sType;
+ const void* pNext;
+} VkSubpassEndInfo;
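
// Sketch of the promoted renderpass2 entry points (prototyped below) driving
// the new Subpass{Begin,End}Info structs; draw calls and error handling are
// elided, and the function name is illustrative.
static void record_pass(VkCommandBuffer cmd, const VkRenderPassBeginInfo* begin) {
    VkSubpassBeginInfo sb = {0};
    sb.sType = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO;
    sb.contents = VK_SUBPASS_CONTENTS_INLINE;

    VkSubpassEndInfo se = {0};
    se.sType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO;

    vkCmdBeginRenderPass2(cmd, begin, &sb);
    /* draw calls go here */
    vkCmdEndRenderPass2(cmd, &se);
}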
+
+typedef struct VkPhysicalDevice8BitStorageFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 storageBuffer8BitAccess;
+ VkBool32 uniformAndStorageBuffer8BitAccess;
+ VkBool32 storagePushConstant8;
+} VkPhysicalDevice8BitStorageFeatures;
+
+typedef struct VkPhysicalDeviceDriverProperties {
+ VkStructureType sType;
+ void* pNext;
+ VkDriverId driverID;
+ char driverName[VK_MAX_DRIVER_NAME_SIZE];
+ char driverInfo[VK_MAX_DRIVER_INFO_SIZE];
+ VkConformanceVersion conformanceVersion;
+} VkPhysicalDeviceDriverProperties;
+
+typedef struct VkPhysicalDeviceShaderAtomicInt64Features {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderBufferInt64Atomics;
+ VkBool32 shaderSharedInt64Atomics;
+} VkPhysicalDeviceShaderAtomicInt64Features;
+
+typedef struct VkPhysicalDeviceShaderFloat16Int8Features {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderFloat16;
+ VkBool32 shaderInt8;
+} VkPhysicalDeviceShaderFloat16Int8Features;
+
+typedef struct VkPhysicalDeviceFloatControlsProperties {
+ VkStructureType sType;
+ void* pNext;
+ VkShaderFloatControlsIndependence denormBehaviorIndependence;
+ VkShaderFloatControlsIndependence roundingModeIndependence;
+ VkBool32 shaderSignedZeroInfNanPreserveFloat16;
+ VkBool32 shaderSignedZeroInfNanPreserveFloat32;
+ VkBool32 shaderSignedZeroInfNanPreserveFloat64;
+ VkBool32 shaderDenormPreserveFloat16;
+ VkBool32 shaderDenormPreserveFloat32;
+ VkBool32 shaderDenormPreserveFloat64;
+ VkBool32 shaderDenormFlushToZeroFloat16;
+ VkBool32 shaderDenormFlushToZeroFloat32;
+ VkBool32 shaderDenormFlushToZeroFloat64;
+ VkBool32 shaderRoundingModeRTEFloat16;
+ VkBool32 shaderRoundingModeRTEFloat32;
+ VkBool32 shaderRoundingModeRTEFloat64;
+ VkBool32 shaderRoundingModeRTZFloat16;
+ VkBool32 shaderRoundingModeRTZFloat32;
+ VkBool32 shaderRoundingModeRTZFloat64;
+} VkPhysicalDeviceFloatControlsProperties;
+
+typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t bindingCount;
+ const VkDescriptorBindingFlags* pBindingFlags;
+} VkDescriptorSetLayoutBindingFlagsCreateInfo;
+
+typedef struct VkPhysicalDeviceDescriptorIndexingFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderInputAttachmentArrayDynamicIndexing;
+ VkBool32 shaderUniformTexelBufferArrayDynamicIndexing;
+ VkBool32 shaderStorageTexelBufferArrayDynamicIndexing;
+ VkBool32 shaderUniformBufferArrayNonUniformIndexing;
+ VkBool32 shaderSampledImageArrayNonUniformIndexing;
+ VkBool32 shaderStorageBufferArrayNonUniformIndexing;
+ VkBool32 shaderStorageImageArrayNonUniformIndexing;
+ VkBool32 shaderInputAttachmentArrayNonUniformIndexing;
+ VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing;
+ VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing;
+ VkBool32 descriptorBindingUniformBufferUpdateAfterBind;
+ VkBool32 descriptorBindingSampledImageUpdateAfterBind;
+ VkBool32 descriptorBindingStorageImageUpdateAfterBind;
+ VkBool32 descriptorBindingStorageBufferUpdateAfterBind;
+ VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
+ VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
+ VkBool32 descriptorBindingUpdateUnusedWhilePending;
+ VkBool32 descriptorBindingPartiallyBound;
+ VkBool32 descriptorBindingVariableDescriptorCount;
+ VkBool32 runtimeDescriptorArray;
+} VkPhysicalDeviceDescriptorIndexingFeatures;
+
+typedef struct VkPhysicalDeviceDescriptorIndexingProperties {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxUpdateAfterBindDescriptorsInAllPools;
+ VkBool32 shaderUniformBufferArrayNonUniformIndexingNative;
+ VkBool32 shaderSampledImageArrayNonUniformIndexingNative;
+ VkBool32 shaderStorageBufferArrayNonUniformIndexingNative;
+ VkBool32 shaderStorageImageArrayNonUniformIndexingNative;
+ VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative;
+ VkBool32 robustBufferAccessUpdateAfterBind;
+ VkBool32 quadDivergentImplicitLod;
+ uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+ uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
+ uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
+ uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
+ uint32_t maxPerStageUpdateAfterBindResources;
+ uint32_t maxDescriptorSetUpdateAfterBindSamplers;
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
+ uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+ uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
+ uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
+ uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
+} VkPhysicalDeviceDescriptorIndexingProperties;
+
+typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t descriptorSetCount;
+ const uint32_t* pDescriptorCounts;
+} VkDescriptorSetVariableDescriptorCountAllocateInfo;
+
+typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupport {
+ VkStructureType sType;
+ void* pNext;
+ uint32_t maxVariableDescriptorCount;
+} VkDescriptorSetVariableDescriptorCountLayoutSupport;
+
+typedef struct VkSubpassDescriptionDepthStencilResolve {
+ VkStructureType sType;
+ const void* pNext;
+ VkResolveModeFlagBits depthResolveMode;
+ VkResolveModeFlagBits stencilResolveMode;
+ const VkAttachmentReference2* pDepthStencilResolveAttachment;
+} VkSubpassDescriptionDepthStencilResolve;
+
+typedef struct VkPhysicalDeviceDepthStencilResolveProperties {
+ VkStructureType sType;
+ void* pNext;
+ VkResolveModeFlags supportedDepthResolveModes;
+ VkResolveModeFlags supportedStencilResolveModes;
+ VkBool32 independentResolveNone;
+ VkBool32 independentResolve;
+} VkPhysicalDeviceDepthStencilResolveProperties;
+
+typedef struct VkPhysicalDeviceScalarBlockLayoutFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 scalarBlockLayout;
+} VkPhysicalDeviceScalarBlockLayoutFeatures;
+
+typedef struct VkImageStencilUsageCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkImageUsageFlags stencilUsage;
+} VkImageStencilUsageCreateInfo;
+
+typedef struct VkSamplerReductionModeCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkSamplerReductionMode reductionMode;
+} VkSamplerReductionModeCreateInfo;
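
// Hedged example: a min-reduction sampler built with the promoted create-info
// struct. Requires the samplerFilterMinmax feature reported above; unset
// VkSamplerCreateInfo fields fall back to their zero defaults.
static VkResult make_min_sampler(VkDevice device, VkSampler* out) {
    VkSamplerReductionModeCreateInfo reduction = {0};
    reduction.sType = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO;
    reduction.reductionMode = VK_SAMPLER_REDUCTION_MODE_MIN;

    VkSamplerCreateInfo info = {0};
    info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    info.pNext = &reduction;
    info.magFilter = VK_FILTER_LINEAR;
    info.minFilter = VK_FILTER_LINEAR;
    return vkCreateSampler(device, &info, NULL, out);
}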
+
+typedef struct VkPhysicalDeviceSamplerFilterMinmaxProperties {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 filterMinmaxSingleComponentFormats;
+ VkBool32 filterMinmaxImageComponentMapping;
+} VkPhysicalDeviceSamplerFilterMinmaxProperties;
+
+typedef struct VkPhysicalDeviceVulkanMemoryModelFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 vulkanMemoryModel;
+ VkBool32 vulkanMemoryModelDeviceScope;
+ VkBool32 vulkanMemoryModelAvailabilityVisibilityChains;
+} VkPhysicalDeviceVulkanMemoryModelFeatures;
+
+typedef struct VkPhysicalDeviceImagelessFramebufferFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 imagelessFramebuffer;
+} VkPhysicalDeviceImagelessFramebufferFeatures;
+
+typedef struct VkFramebufferAttachmentImageInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkImageCreateFlags flags;
+ VkImageUsageFlags usage;
+ uint32_t width;
+ uint32_t height;
+ uint32_t layerCount;
+ uint32_t viewFormatCount;
+ const VkFormat* pViewFormats;
+} VkFramebufferAttachmentImageInfo;
+
+typedef struct VkFramebufferAttachmentsCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t attachmentImageInfoCount;
+ const VkFramebufferAttachmentImageInfo* pAttachmentImageInfos;
+} VkFramebufferAttachmentsCreateInfo;
+
+typedef struct VkRenderPassAttachmentBeginInfo {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t attachmentCount;
+ const VkImageView* pAttachments;
+} VkRenderPassAttachmentBeginInfo;
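
// Sketch of the imageless-framebuffer flow: the framebuffer is created from
// attachment *descriptions* only, and the real image views arrive at begin
// time via VkRenderPassAttachmentBeginInfo chained into VkRenderPassBeginInfo.
// Assumes the imagelessFramebuffer feature is enabled; a single attachment is
// shown for brevity.
static VkResult make_imageless_fb(VkDevice device, VkRenderPass pass,
                                  const VkFramebufferAttachmentImageInfo* img,
                                  uint32_t w, uint32_t h, VkFramebuffer* out) {
    VkFramebufferAttachmentsCreateInfo atts = {0};
    atts.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO;
    atts.attachmentImageInfoCount = 1;
    atts.pAttachmentImageInfos = img;

    VkFramebufferCreateInfo info = {0};
    info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    info.pNext = &atts;
    info.flags = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT; // core in 1.2, formerly _KHR
    info.renderPass = pass;
    info.attachmentCount = 1;
    info.width = w;
    info.height = h;
    info.layers = 1;
    return vkCreateFramebuffer(device, &info, NULL, out);
}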
+
+typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 uniformBufferStandardLayout;
+} VkPhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 shaderSubgroupExtendedTypes;
+} VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 separateDepthStencilLayouts;
+} VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+typedef struct VkAttachmentReferenceStencilLayout {
+ VkStructureType sType;
+ void* pNext;
+ VkImageLayout stencilLayout;
+} VkAttachmentReferenceStencilLayout;
+
+typedef struct VkAttachmentDescriptionStencilLayout {
+ VkStructureType sType;
+ void* pNext;
+ VkImageLayout stencilInitialLayout;
+ VkImageLayout stencilFinalLayout;
+} VkAttachmentDescriptionStencilLayout;
+
+typedef struct VkPhysicalDeviceHostQueryResetFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 hostQueryReset;
+} VkPhysicalDeviceHostQueryResetFeatures;
+
+typedef struct VkPhysicalDeviceTimelineSemaphoreFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 timelineSemaphore;
+} VkPhysicalDeviceTimelineSemaphoreFeatures;
+
+typedef struct VkPhysicalDeviceTimelineSemaphoreProperties {
+ VkStructureType sType;
+ void* pNext;
+ uint64_t maxTimelineSemaphoreValueDifference;
+} VkPhysicalDeviceTimelineSemaphoreProperties;
+
+typedef struct VkSemaphoreTypeCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkSemaphoreType semaphoreType;
+ uint64_t initialValue;
+} VkSemaphoreTypeCreateInfo;
+
+typedef struct VkTimelineSemaphoreSubmitInfo {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t waitSemaphoreValueCount;
+ const uint64_t* pWaitSemaphoreValues;
+ uint32_t signalSemaphoreValueCount;
+ const uint64_t* pSignalSemaphoreValues;
+} VkTimelineSemaphoreSubmitInfo;
+
+typedef struct VkSemaphoreWaitInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkSemaphoreWaitFlags flags;
+ uint32_t semaphoreCount;
+ const VkSemaphore* pSemaphores;
+ const uint64_t* pValues;
+} VkSemaphoreWaitInfo;
+
+typedef struct VkSemaphoreSignalInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkSemaphore semaphore;
+ uint64_t value;
+} VkSemaphoreSignalInfo;
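
// A compact sketch tying the timeline structs together: create a timeline
// semaphore, host-signal it, then host-wait on the value. Assumes the
// timelineSemaphore feature was enabled; return values are ignored for
// brevity, and the function name is illustrative.
static void timeline_roundtrip(VkDevice device) {
    VkSemaphoreTypeCreateInfo type = {0};
    type.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO;
    type.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE;
    type.initialValue = 0;

    VkSemaphoreCreateInfo info = {0};
    info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
    info.pNext = &type;

    VkSemaphore sem;
    vkCreateSemaphore(device, &info, NULL, &sem);

    VkSemaphoreSignalInfo signal = {0};
    signal.sType = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO;
    signal.semaphore = sem;
    signal.value = 1;
    vkSignalSemaphore(device, &signal);

    uint64_t value = 1;
    VkSemaphoreWaitInfo wait = {0};
    wait.sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO; // flags = 0 waits for all semaphores
    wait.semaphoreCount = 1;
    wait.pSemaphores = &sem;
    wait.pValues = &value;
    vkWaitSemaphores(device, &wait, UINT64_MAX);

    vkDestroySemaphore(device, sem, NULL);
}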
+
+typedef struct VkPhysicalDeviceBufferDeviceAddressFeatures {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 bufferDeviceAddress;
+ VkBool32 bufferDeviceAddressCaptureReplay;
+ VkBool32 bufferDeviceAddressMultiDevice;
+} VkPhysicalDeviceBufferDeviceAddressFeatures;
+
+typedef struct VkBufferDeviceAddressInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkBuffer buffer;
+} VkBufferDeviceAddressInfo;
+
+typedef struct VkBufferOpaqueCaptureAddressCreateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ uint64_t opaqueCaptureAddress;
+} VkBufferOpaqueCaptureAddressCreateInfo;
+
+typedef struct VkMemoryOpaqueCaptureAddressAllocateInfo {
+ VkStructureType sType;
+ const void* pNext;
+ uint64_t opaqueCaptureAddress;
+} VkMemoryOpaqueCaptureAddressAllocateInfo;
+
+typedef struct VkDeviceMemoryOpaqueCaptureAddressInfo {
+ VkStructureType sType;
+ const void* pNext;
+ VkDeviceMemory memory;
+} VkDeviceMemoryOpaqueCaptureAddressInfo;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkResetQueryPool)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValue)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphores)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphore)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo);
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddress)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(
+ VkCommandBuffer commandBuffer,
+ VkBuffer buffer,
+ VkDeviceSize offset,
+ VkBuffer countBuffer,
+ VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(
+ VkCommandBuffer commandBuffer,
+ VkBuffer buffer,
+ VkDeviceSize offset,
+ VkBuffer countBuffer,
+ VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount,
+ uint32_t stride);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(
+ VkDevice device,
+ const VkRenderPassCreateInfo2* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkRenderPass* pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(
+ VkCommandBuffer commandBuffer,
+ const VkRenderPassBeginInfo* pRenderPassBegin,
+ const VkSubpassBeginInfo* pSubpassBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(
+ VkCommandBuffer commandBuffer,
+ const VkSubpassBeginInfo* pSubpassBeginInfo,
+ const VkSubpassEndInfo* pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(
+ VkCommandBuffer commandBuffer,
+ const VkSubpassEndInfo* pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(
+ VkDevice device,
+ VkQueryPool queryPool,
+ uint32_t firstQuery,
+ uint32_t queryCount);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(
+ VkDevice device,
+ VkSemaphore semaphore,
+ uint64_t* pValue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(
+ VkDevice device,
+ const VkSemaphoreWaitInfo* pWaitInfo,
+ uint64_t timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(
+ VkDevice device,
+ const VkSemaphoreSignalInfo* pSignalInfo);
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress(
+ VkDevice device,
+ const VkBufferDeviceAddressInfo* pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(
+ VkDevice device,
+ const VkBufferDeviceAddressInfo* pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(
+ VkDevice device,
+ const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+#endif
+
+
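
The block above promotes the timeline-semaphore, render-pass-2, host-query-reset and buffer-device-address entry points to core. As a usage sketch (not part of the patch), assuming a VkDevice created with the timelineSemaphore feature enabled, host-side signal and wait look like this:

#include <stdint.h>
#include <vulkan/vulkan.h>

void timeline_example(VkDevice device) {
    /* Chain VkSemaphoreTypeCreateInfo to create a timeline (counting) semaphore. */
    VkSemaphoreTypeCreateInfo type_info = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
        .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
        .initialValue = 0,
    };
    VkSemaphoreCreateInfo create_info = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = &type_info,
    };
    VkSemaphore semaphore;
    vkCreateSemaphore(device, &create_info, NULL, &semaphore);

    /* Advance the counter from the host ... */
    VkSemaphoreSignalInfo signal_info = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
        .semaphore = semaphore,
        .value = 1,
    };
    vkSignalSemaphore(device, &signal_info);

    /* ... and block until the counter reaches a value (here: already reached). */
    uint64_t wait_value = 1;
    VkSemaphoreWaitInfo wait_info = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
        .semaphoreCount = 1,
        .pSemaphores = &semaphore,
        .pValues = &wait_value,
    };
    vkWaitSemaphores(device, &wait_info, UINT64_MAX);

    vkDestroySemaphore(device, semaphore, NULL);
}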
#define VK_KHR_surface 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR)
#define VK_KHR_SURFACE_SPEC_VERSION 25
@@ -5605,14 +6444,9 @@ VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR(
#define VK_KHR_shader_float16_int8 1
#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1
#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8"
-typedef struct VkPhysicalDeviceShaderFloat16Int8FeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderFloat16;
- VkBool32 shaderInt8;
-} VkPhysicalDeviceShaderFloat16Int8FeaturesKHR;
+typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR;
-typedef VkPhysicalDeviceShaderFloat16Int8FeaturesKHR VkPhysicalDeviceFloat16Int8FeaturesKHR;
+typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR;
@@ -5686,144 +6520,58 @@ VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR(
#define VK_KHR_imageless_framebuffer 1
#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1
#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer"
-typedef struct VkPhysicalDeviceImagelessFramebufferFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 imagelessFramebuffer;
-} VkPhysicalDeviceImagelessFramebufferFeaturesKHR;
+typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR;
-typedef struct VkFramebufferAttachmentImageInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkImageCreateFlags flags;
- VkImageUsageFlags usage;
- uint32_t width;
- uint32_t height;
- uint32_t layerCount;
- uint32_t viewFormatCount;
- const VkFormat* pViewFormats;
-} VkFramebufferAttachmentImageInfoKHR;
+typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR;
-typedef struct VkFramebufferAttachmentsCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t attachmentImageInfoCount;
- const VkFramebufferAttachmentImageInfoKHR* pAttachmentImageInfos;
-} VkFramebufferAttachmentsCreateInfoKHR;
+typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR;
-typedef struct VkRenderPassAttachmentBeginInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t attachmentCount;
- const VkImageView* pAttachments;
-} VkRenderPassAttachmentBeginInfoKHR;
+typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR;
#define VK_KHR_create_renderpass2 1
#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1
#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2"
-typedef struct VkAttachmentDescription2KHR {
- VkStructureType sType;
- const void* pNext;
- VkAttachmentDescriptionFlags flags;
- VkFormat format;
- VkSampleCountFlagBits samples;
- VkAttachmentLoadOp loadOp;
- VkAttachmentStoreOp storeOp;
- VkAttachmentLoadOp stencilLoadOp;
- VkAttachmentStoreOp stencilStoreOp;
- VkImageLayout initialLayout;
- VkImageLayout finalLayout;
-} VkAttachmentDescription2KHR;
+typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR;
-typedef struct VkAttachmentReference2KHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t attachment;
- VkImageLayout layout;
- VkImageAspectFlags aspectMask;
-} VkAttachmentReference2KHR;
+typedef VkAttachmentDescription2 VkAttachmentDescription2KHR;
-typedef struct VkSubpassDescription2KHR {
- VkStructureType sType;
- const void* pNext;
- VkSubpassDescriptionFlags flags;
- VkPipelineBindPoint pipelineBindPoint;
- uint32_t viewMask;
- uint32_t inputAttachmentCount;
- const VkAttachmentReference2KHR* pInputAttachments;
- uint32_t colorAttachmentCount;
- const VkAttachmentReference2KHR* pColorAttachments;
- const VkAttachmentReference2KHR* pResolveAttachments;
- const VkAttachmentReference2KHR* pDepthStencilAttachment;
- uint32_t preserveAttachmentCount;
- const uint32_t* pPreserveAttachments;
-} VkSubpassDescription2KHR;
-
-typedef struct VkSubpassDependency2KHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t srcSubpass;
- uint32_t dstSubpass;
- VkPipelineStageFlags srcStageMask;
- VkPipelineStageFlags dstStageMask;
- VkAccessFlags srcAccessMask;
- VkAccessFlags dstAccessMask;
- VkDependencyFlags dependencyFlags;
- int32_t viewOffset;
-} VkSubpassDependency2KHR;
+typedef VkAttachmentReference2 VkAttachmentReference2KHR;
-typedef struct VkRenderPassCreateInfo2KHR {
- VkStructureType sType;
- const void* pNext;
- VkRenderPassCreateFlags flags;
- uint32_t attachmentCount;
- const VkAttachmentDescription2KHR* pAttachments;
- uint32_t subpassCount;
- const VkSubpassDescription2KHR* pSubpasses;
- uint32_t dependencyCount;
- const VkSubpassDependency2KHR* pDependencies;
- uint32_t correlatedViewMaskCount;
- const uint32_t* pCorrelatedViewMasks;
-} VkRenderPassCreateInfo2KHR;
-
-typedef struct VkSubpassBeginInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSubpassContents contents;
-} VkSubpassBeginInfoKHR;
+typedef VkSubpassDescription2 VkSubpassDescription2KHR;
-typedef struct VkSubpassEndInfoKHR {
- VkStructureType sType;
- const void* pNext;
-} VkSubpassEndInfoKHR;
+typedef VkSubpassDependency2 VkSubpassDependency2KHR;
+
+typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR;
+
+typedef VkSubpassEndInfo VkSubpassEndInfoKHR;
-typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
-typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
-typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo);
-typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR(
VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
+ const VkRenderPassCreateInfo2* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkRenderPass* pRenderPass);
VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR(
VkCommandBuffer commandBuffer,
const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo);
+ const VkSubpassBeginInfo* pSubpassBeginInfo);
VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR(
VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
+ const VkSubpassBeginInfo* pSubpassBeginInfo,
+ const VkSubpassEndInfo* pSubpassEndInfo);
VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR(
VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo);
+ const VkSubpassEndInfo* pSubpassEndInfo);
#endif
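
With the KHR structs above reduced to aliases, code using the extension spelling and code using core 1.2 interoperate freely. A minimal sketch (not part of the patch) of recording with the 2-variants; the command buffer and a filled VkRenderPassBeginInfo are assumed:

#include <vulkan/vulkan.h>

void record_pass(VkCommandBuffer cmd, const VkRenderPassBeginInfo *begin_info) {
    VkSubpassBeginInfo subpass_begin = {
        .sType = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
        .contents = VK_SUBPASS_CONTENTS_INLINE,
    };
    VkSubpassEndInfo subpass_end = {
        .sType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
    };
    /* The KHR entry point now takes the core struct types directly. */
    vkCmdBeginRenderPass2KHR(cmd, begin_info, &subpass_begin);
    /* ... record draws for the first subpass ... */
    vkCmdEndRenderPass2KHR(cmd, &subpass_end);
}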
@@ -5915,6 +6663,153 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR(
#endif
+#define VK_KHR_performance_query 1
+#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1
+#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query"
+
+typedef enum VkPerformanceCounterUnitKHR {
+ VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0,
+ VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1,
+ VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2,
+ VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3,
+ VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4,
+ VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5,
+ VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6,
+ VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7,
+ VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8,
+ VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9,
+ VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10,
+ VK_PERFORMANCE_COUNTER_UNIT_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
+ VK_PERFORMANCE_COUNTER_UNIT_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR,
+ VK_PERFORMANCE_COUNTER_UNIT_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR - VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR + 1),
+ VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterUnitKHR;
+
+typedef enum VkPerformanceCounterScopeKHR {
+ VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0,
+ VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1,
+ VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2,
+ VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+ VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
+ VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+ VK_PERFORMANCE_COUNTER_SCOPE_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+ VK_PERFORMANCE_COUNTER_SCOPE_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+ VK_PERFORMANCE_COUNTER_SCOPE_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR - VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR + 1),
+ VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterScopeKHR;
+
+typedef enum VkPerformanceCounterStorageKHR {
+ VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0,
+ VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1,
+ VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2,
+ VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3,
+ VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4,
+ VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5,
+ VK_PERFORMANCE_COUNTER_STORAGE_BEGIN_RANGE_KHR = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
+ VK_PERFORMANCE_COUNTER_STORAGE_END_RANGE_KHR = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR,
+ VK_PERFORMANCE_COUNTER_STORAGE_RANGE_SIZE_KHR = (VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR - VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR + 1),
+ VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterStorageKHR;
+
+typedef enum VkPerformanceCounterDescriptionFlagBitsKHR {
+ VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = 0x00000001,
+ VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = 0x00000002,
+ VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterDescriptionFlagBitsKHR;
+typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR;
+
+typedef enum VkAcquireProfilingLockFlagBitsKHR {
+ VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAcquireProfilingLockFlagBitsKHR;
+typedef VkFlags VkAcquireProfilingLockFlagsKHR;
+typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 performanceCounterQueryPools;
+ VkBool32 performanceCounterMultipleQueryPools;
+} VkPhysicalDevicePerformanceQueryFeaturesKHR;
+
+typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 allowCommandBufferQueryCopies;
+} VkPhysicalDevicePerformanceQueryPropertiesKHR;
+
+typedef struct VkPerformanceCounterKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkPerformanceCounterUnitKHR unit;
+ VkPerformanceCounterScopeKHR scope;
+ VkPerformanceCounterStorageKHR storage;
+ uint8_t uuid[VK_UUID_SIZE];
+} VkPerformanceCounterKHR;
+
+typedef struct VkPerformanceCounterDescriptionKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkPerformanceCounterDescriptionFlagsKHR flags;
+ char name[VK_MAX_DESCRIPTION_SIZE];
+ char category[VK_MAX_DESCRIPTION_SIZE];
+ char description[VK_MAX_DESCRIPTION_SIZE];
+} VkPerformanceCounterDescriptionKHR;
+
+typedef struct VkQueryPoolPerformanceCreateInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t queueFamilyIndex;
+ uint32_t counterIndexCount;
+ const uint32_t* pCounterIndices;
+} VkQueryPoolPerformanceCreateInfoKHR;
+
+typedef union VkPerformanceCounterResultKHR {
+ int32_t int32;
+ int64_t int64;
+ uint32_t uint32;
+ uint64_t uint64;
+ float float32;
+ double float64;
+} VkPerformanceCounterResultKHR;
+
+typedef struct VkAcquireProfilingLockInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ VkAcquireProfilingLockFlagsKHR flags;
+ uint64_t timeout;
+} VkAcquireProfilingLockInfoKHR;
+
+typedef struct VkPerformanceQuerySubmitInfoKHR {
+ VkStructureType sType;
+ const void* pNext;
+ uint32_t counterPassIndex;
+} VkPerformanceQuerySubmitInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ uint32_t* pCounterCount,
+ VkPerformanceCounterKHR* pCounters,
+ VkPerformanceCounterDescriptionKHR* pCounterDescriptions);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+ VkPhysicalDevice physicalDevice,
+ const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo,
+ uint32_t* pNumPasses);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR(
+ VkDevice device,
+ const VkAcquireProfilingLockInfoKHR* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR(
+ VkDevice device);
+#endif
+
+
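
Counters are discovered per queue family with the usual Vulkan two-call idiom; a sketch (not part of the patch), assuming queue family 0 of `gpu` supports performance queries:

#include <stdlib.h>
#include <vulkan/vulkan.h>

void list_counters(VkPhysicalDevice gpu) {
    uint32_t count = 0;
    vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu, 0, &count, NULL, NULL);

    VkPerformanceCounterKHR *counters = calloc(count, sizeof(*counters));
    VkPerformanceCounterDescriptionKHR *descs = calloc(count, sizeof(*descs));
    for (uint32_t i = 0; i < count; i++) {
        counters[i].sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
        descs[i].sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR;
    }
    vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(gpu, 0, &count, counters, descs);

    /* descs[i].name / descs[i].category now describe counters[i];
     * counters[i].storage selects which VkPerformanceCounterResultKHR member to read. */
    free(descs);
    free(counters);
}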
#define VK_KHR_maintenance2 1
#define VK_KHR_MAINTENANCE2_SPEC_VERSION 1
#define VK_KHR_MAINTENANCE2_EXTENSION_NAME "VK_KHR_maintenance2"
@@ -6100,12 +6995,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR(
#define VK_KHR_image_format_list 1
#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1
#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list"
-typedef struct VkImageFormatListCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t viewFormatCount;
- const VkFormat* pViewFormats;
-} VkImageFormatListCreateInfoKHR;
+typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR;
@@ -6219,36 +7109,21 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR(
#define VK_KHR_shader_subgroup_extended_types 1
#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1
#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types"
-typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderSubgroupExtendedTypes;
-} VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
#define VK_KHR_8bit_storage 1
#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1
#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage"
-typedef struct VkPhysicalDevice8BitStorageFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 storageBuffer8BitAccess;
- VkBool32 uniformAndStorageBuffer8BitAccess;
- VkBool32 storagePushConstant8;
-} VkPhysicalDevice8BitStorageFeaturesKHR;
+typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR;
#define VK_KHR_shader_atomic_int64 1
#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1
#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64"
-typedef struct VkPhysicalDeviceShaderAtomicInt64FeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderBufferInt64Atomics;
- VkBool32 shaderSharedInt64Atomics;
-} VkPhysicalDeviceShaderAtomicInt64FeaturesKHR;
+typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR;
@@ -6265,113 +7140,37 @@ typedef struct VkPhysicalDeviceShaderClockFeaturesKHR {
#define VK_KHR_driver_properties 1
-#define VK_MAX_DRIVER_NAME_SIZE_KHR 256
-#define VK_MAX_DRIVER_INFO_SIZE_KHR 256
#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1
#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties"
+#define VK_MAX_DRIVER_NAME_SIZE_KHR VK_MAX_DRIVER_NAME_SIZE
+#define VK_MAX_DRIVER_INFO_SIZE_KHR VK_MAX_DRIVER_INFO_SIZE
+typedef VkDriverId VkDriverIdKHR;
-typedef enum VkDriverIdKHR {
- VK_DRIVER_ID_AMD_PROPRIETARY_KHR = 1,
- VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = 2,
- VK_DRIVER_ID_MESA_RADV_KHR = 3,
- VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = 4,
- VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = 5,
- VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = 6,
- VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = 7,
- VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = 8,
- VK_DRIVER_ID_ARM_PROPRIETARY_KHR = 9,
- VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = 10,
- VK_DRIVER_ID_GGP_PROPRIETARY_KHR = 11,
- VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = 12,
- VK_DRIVER_ID_BEGIN_RANGE_KHR = VK_DRIVER_ID_AMD_PROPRIETARY_KHR,
- VK_DRIVER_ID_END_RANGE_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR,
- VK_DRIVER_ID_RANGE_SIZE_KHR = (VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR - VK_DRIVER_ID_AMD_PROPRIETARY_KHR + 1),
- VK_DRIVER_ID_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkDriverIdKHR;
-typedef struct VkConformanceVersionKHR {
- uint8_t major;
- uint8_t minor;
- uint8_t subminor;
- uint8_t patch;
-} VkConformanceVersionKHR;
+typedef VkConformanceVersion VkConformanceVersionKHR;
-typedef struct VkPhysicalDeviceDriverPropertiesKHR {
- VkStructureType sType;
- void* pNext;
- VkDriverIdKHR driverID;
- char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR];
- char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR];
- VkConformanceVersionKHR conformanceVersion;
-} VkPhysicalDeviceDriverPropertiesKHR;
+typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR;
#define VK_KHR_shader_float_controls 1
#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4
#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls"
+typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR;
-typedef enum VkShaderFloatControlsIndependenceKHR {
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = 0,
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = 1,
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = 2,
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_BEGIN_RANGE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR,
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_END_RANGE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR,
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_RANGE_SIZE_KHR = (VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR - VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR + 1),
- VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkShaderFloatControlsIndependenceKHR;
-typedef struct VkPhysicalDeviceFloatControlsPropertiesKHR {
- VkStructureType sType;
- void* pNext;
- VkShaderFloatControlsIndependenceKHR denormBehaviorIndependence;
- VkShaderFloatControlsIndependenceKHR roundingModeIndependence;
- VkBool32 shaderSignedZeroInfNanPreserveFloat16;
- VkBool32 shaderSignedZeroInfNanPreserveFloat32;
- VkBool32 shaderSignedZeroInfNanPreserveFloat64;
- VkBool32 shaderDenormPreserveFloat16;
- VkBool32 shaderDenormPreserveFloat32;
- VkBool32 shaderDenormPreserveFloat64;
- VkBool32 shaderDenormFlushToZeroFloat16;
- VkBool32 shaderDenormFlushToZeroFloat32;
- VkBool32 shaderDenormFlushToZeroFloat64;
- VkBool32 shaderRoundingModeRTEFloat16;
- VkBool32 shaderRoundingModeRTEFloat32;
- VkBool32 shaderRoundingModeRTEFloat64;
- VkBool32 shaderRoundingModeRTZFloat16;
- VkBool32 shaderRoundingModeRTZFloat32;
- VkBool32 shaderRoundingModeRTZFloat64;
-} VkPhysicalDeviceFloatControlsPropertiesKHR;
+typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR;
#define VK_KHR_depth_stencil_resolve 1
#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1
#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve"
+typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR;
-typedef enum VkResolveModeFlagBitsKHR {
- VK_RESOLVE_MODE_NONE_KHR = 0,
- VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = 0x00000001,
- VK_RESOLVE_MODE_AVERAGE_BIT_KHR = 0x00000002,
- VK_RESOLVE_MODE_MIN_BIT_KHR = 0x00000004,
- VK_RESOLVE_MODE_MAX_BIT_KHR = 0x00000008,
- VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkResolveModeFlagBitsKHR;
-typedef VkFlags VkResolveModeFlagsKHR;
-typedef struct VkSubpassDescriptionDepthStencilResolveKHR {
- VkStructureType sType;
- const void* pNext;
- VkResolveModeFlagBitsKHR depthResolveMode;
- VkResolveModeFlagBitsKHR stencilResolveMode;
- const VkAttachmentReference2KHR* pDepthStencilResolveAttachment;
-} VkSubpassDescriptionDepthStencilResolveKHR;
+typedef VkResolveModeFlags VkResolveModeFlagsKHR;
-typedef struct VkPhysicalDeviceDepthStencilResolvePropertiesKHR {
- VkStructureType sType;
- void* pNext;
- VkResolveModeFlagsKHR supportedDepthResolveModes;
- VkResolveModeFlagsKHR supportedStencilResolveModes;
- VkBool32 independentResolveNone;
- VkBool32 independentResolve;
-} VkPhysicalDeviceDepthStencilResolvePropertiesKHR;
+typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR;
+
+typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR;
@@ -6383,68 +7182,27 @@ typedef struct VkPhysicalDeviceDepthStencilResolvePropertiesKHR {
#define VK_KHR_timeline_semaphore 1
#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2
#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore"
+typedef VkSemaphoreType VkSemaphoreTypeKHR;
-typedef enum VkSemaphoreTypeKHR {
- VK_SEMAPHORE_TYPE_BINARY_KHR = 0,
- VK_SEMAPHORE_TYPE_TIMELINE_KHR = 1,
- VK_SEMAPHORE_TYPE_BEGIN_RANGE_KHR = VK_SEMAPHORE_TYPE_BINARY_KHR,
- VK_SEMAPHORE_TYPE_END_RANGE_KHR = VK_SEMAPHORE_TYPE_TIMELINE_KHR,
- VK_SEMAPHORE_TYPE_RANGE_SIZE_KHR = (VK_SEMAPHORE_TYPE_TIMELINE_KHR - VK_SEMAPHORE_TYPE_BINARY_KHR + 1),
- VK_SEMAPHORE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkSemaphoreTypeKHR;
-
-typedef enum VkSemaphoreWaitFlagBitsKHR {
- VK_SEMAPHORE_WAIT_ANY_BIT_KHR = 0x00000001,
- VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
-} VkSemaphoreWaitFlagBitsKHR;
-typedef VkFlags VkSemaphoreWaitFlagsKHR;
-typedef struct VkPhysicalDeviceTimelineSemaphoreFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 timelineSemaphore;
-} VkPhysicalDeviceTimelineSemaphoreFeaturesKHR;
+typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR;
-typedef struct VkPhysicalDeviceTimelineSemaphorePropertiesKHR {
- VkStructureType sType;
- void* pNext;
- uint64_t maxTimelineSemaphoreValueDifference;
-} VkPhysicalDeviceTimelineSemaphorePropertiesKHR;
+typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR;
-typedef struct VkSemaphoreTypeCreateInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSemaphoreTypeKHR semaphoreType;
- uint64_t initialValue;
-} VkSemaphoreTypeCreateInfoKHR;
+typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR;
-typedef struct VkTimelineSemaphoreSubmitInfoKHR {
- VkStructureType sType;
- const void* pNext;
- uint32_t waitSemaphoreValueCount;
- const uint64_t* pWaitSemaphoreValues;
- uint32_t signalSemaphoreValueCount;
- const uint64_t* pSignalSemaphoreValues;
-} VkTimelineSemaphoreSubmitInfoKHR;
+typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR;
-typedef struct VkSemaphoreWaitInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSemaphoreWaitFlagsKHR flags;
- uint32_t semaphoreCount;
- const VkSemaphore* pSemaphores;
- const uint64_t* pValues;
-} VkSemaphoreWaitInfoKHR;
+typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR;
-typedef struct VkSemaphoreSignalInfoKHR {
- VkStructureType sType;
- const void* pNext;
- VkSemaphore semaphore;
- uint64_t value;
-} VkSemaphoreSignalInfoKHR;
+typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR;
+
+typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR;
+
+typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR;
typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue);
-typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout);
-typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR(
@@ -6454,25 +7212,19 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR(
VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR(
VkDevice device,
- const VkSemaphoreWaitInfoKHR* pWaitInfo,
+ const VkSemaphoreWaitInfo* pWaitInfo,
uint64_t timeout);
VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR(
VkDevice device,
- const VkSemaphoreSignalInfoKHR* pSignalInfo);
+ const VkSemaphoreSignalInfo* pSignalInfo);
#endif
#define VK_KHR_vulkan_memory_model 1
#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3
#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model"
-typedef struct VkPhysicalDeviceVulkanMemoryModelFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 vulkanMemoryModel;
- VkBool32 vulkanMemoryModelDeviceScope;
- VkBool32 vulkanMemoryModelAvailabilityVisibilityChains;
-} VkPhysicalDeviceVulkanMemoryModelFeaturesKHR;
+typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR;
@@ -6495,36 +7247,51 @@ typedef struct VkSurfaceProtectedCapabilitiesKHR {
#define VK_KHR_separate_depth_stencil_layouts 1
#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1
#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts"
-typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 separateDepthStencilLayouts;
-} VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
+typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
-typedef struct VkAttachmentReferenceStencilLayoutKHR {
- VkStructureType sType;
- void* pNext;
- VkImageLayout stencilLayout;
-} VkAttachmentReferenceStencilLayoutKHR;
+typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR;
-typedef struct VkAttachmentDescriptionStencilLayoutKHR {
- VkStructureType sType;
- void* pNext;
- VkImageLayout stencilInitialLayout;
- VkImageLayout stencilFinalLayout;
-} VkAttachmentDescriptionStencilLayoutKHR;
+typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR;
#define VK_KHR_uniform_buffer_standard_layout 1
#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1
#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout"
-typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR {
- VkStructureType sType;
- void* pNext;
- VkBool32 uniformBufferStandardLayout;
-} VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+
+
+
+#define VK_KHR_buffer_device_address 1
+#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1
+#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address"
+typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR;
+
+typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR;
+
+typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR;
+typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR;
+
+typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR;
+
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR(
+ VkDevice device,
+ const VkBufferDeviceAddressInfo* pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR(
+ VkDevice device,
+ const VkBufferDeviceAddressInfo* pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+ VkDevice device,
+ const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+#endif
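
A sketch (not part of the patch) of the central query, assuming the buffer was created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT and the bufferDeviceAddress feature is enabled; the KHR aliases above make this identical to the core 1.2 call:

#include <vulkan/vulkan.h>

VkDeviceAddress buffer_address(VkDevice device, VkBuffer buffer) {
    VkBufferDeviceAddressInfo info = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
        .buffer = buffer,
    };
    /* The returned address can be handed to shaders that dereference it
     * through SPIR-V PhysicalStorageBuffer pointers. */
    return vkGetBufferDeviceAddress(device, &info);
}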
#define VK_KHR_pipeline_executable_properties 1
@@ -7178,7 +7945,7 @@ typedef struct VkValidationFlagsEXT {
#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr"
typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT {
VkStructureType sType;
- const void* pNext;
+ void* pNext;
VkBool32 textureCompressionASTC_HDR;
} VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
@@ -8020,28 +8787,11 @@ VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(
#define VK_EXT_sampler_filter_minmax 1
#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2
#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax"
+typedef VkSamplerReductionMode VkSamplerReductionModeEXT;
-typedef enum VkSamplerReductionModeEXT {
- VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0,
- VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1,
- VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2,
- VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
- VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT,
- VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1),
- VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
-} VkSamplerReductionModeEXT;
-typedef struct VkSamplerReductionModeCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkSamplerReductionModeEXT reductionMode;
-} VkSamplerReductionModeCreateInfoEXT;
+typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT;
-typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 filterMinmaxSingleComponentFormats;
- VkBool32 filterMinmaxImageComponentMapping;
-} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
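
Since the EXT names are now aliases, the reduction mode is chained into sampler creation exactly as in core 1.2. A sketch (not part of the patch), assuming a valid `device` with the feature available:

#include <vulkan/vulkan.h>

VkSampler make_min_sampler(VkDevice device) {
    VkSamplerReductionModeCreateInfo reduction = {
        .sType = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
        .reductionMode = VK_SAMPLER_REDUCTION_MODE_MIN,
    };
    VkSamplerCreateInfo info = {
        .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
        .pNext = &reduction,
        .magFilter = VK_FILTER_LINEAR,
        .minFilter = VK_FILTER_LINEAR,
    };
    VkSampler sampler = VK_NULL_HANDLE;
    vkCreateSampler(device, &info, NULL, &sampler);
    return sampler;
}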
@@ -8400,87 +9150,19 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT(
#define VK_EXT_descriptor_indexing 1
#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2
#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing"
+typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT;
-typedef enum VkDescriptorBindingFlagBitsEXT {
- VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = 0x00000001,
- VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = 0x00000002,
- VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = 0x00000004,
- VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = 0x00000008,
- VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
-} VkDescriptorBindingFlagBitsEXT;
-typedef VkFlags VkDescriptorBindingFlagsEXT;
-typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t bindingCount;
- const VkDescriptorBindingFlagsEXT* pBindingFlags;
-} VkDescriptorSetLayoutBindingFlagsCreateInfoEXT;
+typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT;
-typedef struct VkPhysicalDeviceDescriptorIndexingFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 shaderInputAttachmentArrayDynamicIndexing;
- VkBool32 shaderUniformTexelBufferArrayDynamicIndexing;
- VkBool32 shaderStorageTexelBufferArrayDynamicIndexing;
- VkBool32 shaderUniformBufferArrayNonUniformIndexing;
- VkBool32 shaderSampledImageArrayNonUniformIndexing;
- VkBool32 shaderStorageBufferArrayNonUniformIndexing;
- VkBool32 shaderStorageImageArrayNonUniformIndexing;
- VkBool32 shaderInputAttachmentArrayNonUniformIndexing;
- VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing;
- VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing;
- VkBool32 descriptorBindingUniformBufferUpdateAfterBind;
- VkBool32 descriptorBindingSampledImageUpdateAfterBind;
- VkBool32 descriptorBindingStorageImageUpdateAfterBind;
- VkBool32 descriptorBindingStorageBufferUpdateAfterBind;
- VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind;
- VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind;
- VkBool32 descriptorBindingUpdateUnusedWhilePending;
- VkBool32 descriptorBindingPartiallyBound;
- VkBool32 descriptorBindingVariableDescriptorCount;
- VkBool32 runtimeDescriptorArray;
-} VkPhysicalDeviceDescriptorIndexingFeaturesEXT;
+typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT;
-typedef struct VkPhysicalDeviceDescriptorIndexingPropertiesEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxUpdateAfterBindDescriptorsInAllPools;
- VkBool32 shaderUniformBufferArrayNonUniformIndexingNative;
- VkBool32 shaderSampledImageArrayNonUniformIndexingNative;
- VkBool32 shaderStorageBufferArrayNonUniformIndexingNative;
- VkBool32 shaderStorageImageArrayNonUniformIndexingNative;
- VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative;
- VkBool32 robustBufferAccessUpdateAfterBind;
- VkBool32 quadDivergentImplicitLod;
- uint32_t maxPerStageDescriptorUpdateAfterBindSamplers;
- uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers;
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers;
- uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages;
- uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages;
- uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments;
- uint32_t maxPerStageUpdateAfterBindResources;
- uint32_t maxDescriptorSetUpdateAfterBindSamplers;
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers;
- uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers;
- uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
- uint32_t maxDescriptorSetUpdateAfterBindSampledImages;
- uint32_t maxDescriptorSetUpdateAfterBindStorageImages;
- uint32_t maxDescriptorSetUpdateAfterBindInputAttachments;
-} VkPhysicalDeviceDescriptorIndexingPropertiesEXT;
+typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT;
-typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- uint32_t descriptorSetCount;
- const uint32_t* pDescriptorCounts;
-} VkDescriptorSetVariableDescriptorCountAllocateInfoEXT;
+typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT;
-typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupportEXT {
- VkStructureType sType;
- void* pNext;
- uint32_t maxVariableDescriptorCount;
-} VkDescriptorSetVariableDescriptorCountLayoutSupportEXT;
+typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT;
+
+typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT;
@@ -8913,7 +9595,7 @@ typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV {
#define VK_EXT_filter_cubic 1
-#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 2
+#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3
#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic"
typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT {
VkStructureType sType;
@@ -8925,7 +9607,7 @@ typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT {
VkStructureType sType;
void* pNext;
VkBool32 filterCubic;
- VkBool32 filterCubicMinmax ;
+ VkBool32 filterCubicMinmax;
} VkFilterCubicImageViewImageFormatPropertiesEXT;
@@ -9544,11 +10226,7 @@ typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT {
#define VK_EXT_scalar_block_layout 1
#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1
#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout"
-typedef struct VkPhysicalDeviceScalarBlockLayoutFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 scalarBlockLayout;
-} VkPhysicalDeviceScalarBlockLayoutFeaturesEXT;
+typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT;
@@ -9658,7 +10336,6 @@ typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
#define VK_EXT_buffer_device_address 1
-typedef uint64_t VkDeviceAddress;
#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2
#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address"
typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT {
@@ -9671,11 +10348,7 @@ typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT {
typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT;
-typedef struct VkBufferDeviceAddressInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkBuffer buffer;
-} VkBufferDeviceAddressInfoEXT;
+typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT;
typedef struct VkBufferDeviceAddressCreateInfoEXT {
VkStructureType sType;
@@ -9683,23 +10356,54 @@ typedef struct VkBufferDeviceAddressCreateInfoEXT {
VkDeviceAddress deviceAddress;
} VkBufferDeviceAddressCreateInfoEXT;
-typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo);
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT(
VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo);
+ const VkBufferDeviceAddressInfo* pInfo);
+#endif
+
+
+#define VK_EXT_tooling_info 1
+#define VK_EXT_TOOLING_INFO_SPEC_VERSION 1
+#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info"
+
+typedef enum VkToolPurposeFlagBitsEXT {
+ VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = 0x00000001,
+ VK_TOOL_PURPOSE_PROFILING_BIT_EXT = 0x00000002,
+ VK_TOOL_PURPOSE_TRACING_BIT_EXT = 0x00000004,
+ VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = 0x00000008,
+ VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = 0x00000010,
+ VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020,
+ VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040,
+ VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkToolPurposeFlagBitsEXT;
+typedef VkFlags VkToolPurposeFlagsEXT;
+typedef struct VkPhysicalDeviceToolPropertiesEXT {
+ VkStructureType sType;
+ void* pNext;
+ char name[VK_MAX_EXTENSION_NAME_SIZE];
+ char version[VK_MAX_EXTENSION_NAME_SIZE];
+ VkToolPurposeFlagsEXT purposes;
+ char description[VK_MAX_DESCRIPTION_SIZE];
+ char layer[VK_MAX_EXTENSION_NAME_SIZE];
+} VkPhysicalDeviceToolPropertiesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT(
+ VkPhysicalDevice physicalDevice,
+ uint32_t* pToolCount,
+ VkPhysicalDeviceToolPropertiesEXT* pToolProperties);
#endif
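
The tool list is retrieved with the two-call idiom; a sketch (not part of the patch), assuming a valid `gpu`:

#include <stdio.h>
#include <stdlib.h>
#include <vulkan/vulkan.h>

void list_tools(VkPhysicalDevice gpu) {
    uint32_t count = 0;
    vkGetPhysicalDeviceToolPropertiesEXT(gpu, &count, NULL);

    VkPhysicalDeviceToolPropertiesEXT *tools = calloc(count, sizeof(*tools));
    for (uint32_t i = 0; i < count; i++)
        tools[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT;
    vkGetPhysicalDeviceToolPropertiesEXT(gpu, &count, tools);

    for (uint32_t i = 0; i < count; i++)
        printf("%s %s (purposes 0x%x): %s\n",
               tools[i].name, tools[i].version, tools[i].purposes, tools[i].description);
    free(tools);
}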
#define VK_EXT_separate_stencil_usage 1
#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1
#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage"
-typedef struct VkImageStencilUsageCreateInfoEXT {
- VkStructureType sType;
- const void* pNext;
- VkImageUsageFlags stencilUsage;
-} VkImageStencilUsageCreateInfoEXT;
+typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT;
@@ -9952,11 +10656,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(
#define VK_EXT_host_query_reset 1
#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1
#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset"
-typedef struct VkPhysicalDeviceHostQueryResetFeaturesEXT {
- VkStructureType sType;
- void* pNext;
- VkBool32 hostQueryReset;
-} VkPhysicalDeviceHostQueryResetFeaturesEXT;
+typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT;
typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
diff --git a/thirdparty/vulkan/loader/extension_manual.c b/thirdparty/vulkan/loader/extension_manual.c
index 490496d7c7..b8118fdc8a 100644
--- a/thirdparty/vulkan/loader/extension_manual.c
+++ b/thirdparty/vulkan/loader/extension_manual.c
@@ -441,3 +441,22 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDeviceGroupSurfacePresentModes2EXT(
}
#endif // VK_USE_PLATFORM_WIN32_KHR
+
+// ---- VK_EXT_tooling_info extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceToolPropertiesEXT(
+ VkPhysicalDevice physicalDevice,
+ uint32_t* pToolCount,
+ VkPhysicalDeviceToolPropertiesEXT* pToolProperties) {
+ const VkLayerInstanceDispatchTable *disp;
+ VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+ disp = loader_get_instance_layer_dispatch(physicalDevice);
+ return disp->GetPhysicalDeviceToolPropertiesEXT(unwrapped_phys_dev, pToolCount, pToolProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceToolPropertiesEXT(
+ VkPhysicalDevice physicalDevice,
+ uint32_t* pToolCount,
+ VkPhysicalDeviceToolPropertiesEXT* pToolProperties) {
+ return VK_SUCCESS;
+}
diff --git a/thirdparty/vulkan/loader/extension_manual.h b/thirdparty/vulkan/loader/extension_manual.h
index e07b9102dc..fe4287effb 100644
--- a/thirdparty/vulkan/loader/extension_manual.h
+++ b/thirdparty/vulkan/loader/extension_manual.h
@@ -104,3 +104,15 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_GetDeviceGroupSurfacePresentModes2EXT(
VkDevice device,
const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo,
VkDeviceGroupPresentModeFlagsKHR* pModes);
+
+// ---- VK_EXT_tooling_info extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceToolPropertiesEXT(
+ VkPhysicalDevice physicalDevice,
+ uint32_t* pToolCount,
+ VkPhysicalDeviceToolPropertiesEXT* pToolProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_GetPhysicalDeviceToolPropertiesEXT(
+ VkPhysicalDevice physicalDevice,
+ uint32_t* pToolCount,
+ VkPhysicalDeviceToolPropertiesEXT* pToolProperties);
diff --git a/thirdparty/vulkan/loader/gpa_helper.h b/thirdparty/vulkan/loader/gpa_helper.h
index e08898b6d8..832b68c3ef 100644
--- a/thirdparty/vulkan/loader/gpa_helper.h
+++ b/thirdparty/vulkan/loader/gpa_helper.h
@@ -1,8 +1,8 @@
/*
*
- * Copyright (c) 2015 The Khronos Group Inc.
- * Copyright (c) 2015 Valve Corporation
- * Copyright (c) 2015 LunarG, Inc.
+ * Copyright (c) 2015-18, 2020 The Khronos Group Inc.
+ * Copyright (c) 2015-18, 2020 Valve Corporation
+ * Copyright (c) 2015-18, 2020 LunarG, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -190,6 +190,21 @@ static inline void *trampolineGetProcAddr(struct loader_instance *inst, const ch
if (!strcmp(funcName, "vkGetImageSparseMemoryRequirements2")) return vkGetImageSparseMemoryRequirements2;
if (!strcmp(funcName, "vkGetBufferMemoryRequirements2")) return vkGetBufferMemoryRequirements2;
+ // Core 1.2 functions
+ if (!strcmp(funcName, "vkCreateRenderPass2")) return vkCreateRenderPass2;
+ if (!strcmp(funcName, "vkCmdBeginRenderPass2")) return vkCmdBeginRenderPass2;
+ if (!strcmp(funcName, "vkCmdNextSubpass2")) return vkCmdNextSubpass2;
+ if (!strcmp(funcName, "vkCmdEndRenderPass2")) return vkCmdEndRenderPass2;
+ if (!strcmp(funcName, "vkCmdDrawIndirectCount")) return vkCmdDrawIndirectCount;
+ if (!strcmp(funcName, "vkCmdDrawIndexedIndirectCount")) return vkCmdDrawIndexedIndirectCount;
+ if (!strcmp(funcName, "vkGetSemaphoreCounterValue")) return vkGetSemaphoreCounterValue;
+ if (!strcmp(funcName, "vkWaitSemaphores")) return vkWaitSemaphores;
+ if (!strcmp(funcName, "vkSignalSemaphore")) return vkSignalSemaphore;
+ if (!strcmp(funcName, "vkGetBufferDeviceAddress")) return vkGetBufferDeviceAddress;
+ if (!strcmp(funcName, "vkGetBufferOpaqueCaptureAddress")) return vkGetBufferOpaqueCaptureAddress;
+ if (!strcmp(funcName, "vkGetDeviceMemoryOpaqueCaptureAddress")) return vkGetDeviceMemoryOpaqueCaptureAddress;
+ if (!strcmp(funcName, "vkResetQueryPool")) return vkResetQueryPool;
+
// Instance extensions
void *addr;
if (debug_utils_InstanceGpa(inst, funcName, &addr)) return addr;
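
The strcmp table added above is what lets vkGetInstanceProcAddr resolve the unsuffixed 1.2 names through the loader. A sketch (not part of the patch) of how an application fetches one; the call returns NULL on pre-1.2 loaders:

#include <vulkan/vulkan.h>

PFN_vkWaitSemaphores load_wait_semaphores(VkInstance instance) {
    return (PFN_vkWaitSemaphores)vkGetInstanceProcAddr(instance, "vkWaitSemaphores");
}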
diff --git a/thirdparty/vulkan/loader/loader.c b/thirdparty/vulkan/loader/loader.c
index 398c44bf9c..c7cdb47122 100644
--- a/thirdparty/vulkan/loader/loader.c
+++ b/thirdparty/vulkan/loader/loader.c
@@ -1,8 +1,8 @@
/*
*
- * Copyright (c) 2014-2019 The Khronos Group Inc.
- * Copyright (c) 2014-2019 Valve Corporation
- * Copyright (c) 2014-2019 LunarG, Inc.
+ * Copyright (c) 2014-2020 The Khronos Group Inc.
+ * Copyright (c) 2014-2020 Valve Corporation
+ * Copyright (c) 2014-2020 LunarG, Inc.
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -126,7 +126,7 @@ LOADER_PLATFORM_THREAD_ONCE_DECLARATION(once_init);
-// This loader supports Vulkan API version 1.1
+// This loader supports Vulkan API version 1.2
uint32_t loader_major_version = 1;
-uint32_t loader_minor_version = 1;
+uint32_t loader_minor_version = 2;
void *loader_instance_heap_alloc(const struct loader_instance *instance, size_t size, VkSystemAllocationScope alloc_scope) {
void *pMemory = NULL;
@@ -252,6 +252,7 @@ static inline char *loader_getenv(const char *name, const struct loader_instance
}
static inline char *loader_secure_getenv(const char *name, const struct loader_instance *inst) {
+ char *out;
#if defined(__APPLE__)
// Apple does not appear to have a secure getenv implementation.
// The main difference between secure getenv and getenv is that secure getenv
@@ -263,19 +264,21 @@ static inline char *loader_secure_getenv(const char *name, const struct loader_i
return IsHighIntegrity() ? NULL : loader_getenv(name, inst);
#else
// Linux
-#ifdef HAVE_SECURE_GETENV
+#if defined(HAVE_SECURE_GETENV) && !defined(USE_UNSAFE_FILE_SEARCH)
(void)inst;
- return secure_getenv(name);
-#elif defined(HAVE___SECURE_GETENV)
+ out = secure_getenv(name);
+#elif defined(HAVE___SECURE_GETENV) && !defined(USE_UNSAFE_FILE_SEARCH)
(void)inst;
- return __secure_getenv(name);
+ out = __secure_getenv(name);
#else
-#pragma message( \
- "Warning: Falling back to non-secure getenv for environmental lookups! Consider" \
- " updating to a different libc.")
- return loader_getenv(name, inst);
+ out = loader_getenv(name, inst);
#endif
#endif
+ if (out == NULL) {
+ loader_log(inst, LOADER_INFO_BIT, 0,
+ "Loader is running with elevated permissions. Environment variable %s will be ignored.", name);
+ }
+ return out;
}
static inline void loader_free_getenv(char *val, const struct loader_instance *inst) {
@@ -335,9 +338,13 @@ static inline char *loader_getenv(const char *name, const struct loader_instance
}
static inline char *loader_secure_getenv(const char *name, const struct loader_instance *inst) {
+#if !defined(USE_UNSAFE_FILE_SEARCH)
if (IsHighIntegrity()) {
+ loader_log(inst, LOADER_INFO_BIT, 0,
+ "Loader is running with elevated permissions. Environment variable %s will be ignored.", name);
return NULL;
}
+#endif
return loader_getenv(name, inst);
}
@@ -578,7 +585,7 @@ static bool loaderAddJsonEntry(const struct loader_instance *inst,
//
// This function looks for filename in given device handle, filename is then added to return list
// function return true if filename was appended to reg_data list
-// If error occures result is updated with failure reason
+// If error occurs result is updated with failure reason
bool loaderGetDeviceRegistryEntry(const struct loader_instance *inst, char **reg_data, PDWORD total_size, DEVINST dev_id,
LPCSTR value_name, VkResult *result) {
HKEY hkrKey = INVALID_HANDLE_VALUE;
@@ -855,7 +862,6 @@ VkResult loaderGetRegistryFiles(const struct loader_instance *inst, char *locati
char name[2048];
char *loc = location;
char *next;
- DWORD idx;
DWORD name_size = sizeof(name);
DWORD value;
DWORD value_size = sizeof(value);
@@ -885,9 +891,9 @@ VkResult loaderGetRegistryFiles(const struct loader_instance *inst, char *locati
access_flags = KEY_QUERY_VALUE;
rtn_value = RegOpenKeyEx(hive, loc, 0, access_flags, &key);
if (ERROR_SUCCESS == rtn_value) {
- idx = 0;
- while ((rtn_value = RegEnumValue(key, idx++, name, &name_size, NULL, NULL, (LPBYTE)&value, &value_size)) ==
- ERROR_SUCCESS) {
+ for (DWORD idx = 0;
+ (rtn_value = RegEnumValue(key, idx++, name, &name_size, NULL, NULL, (LPBYTE)&value, &value_size)) == ERROR_SUCCESS;
+ name_size = sizeof(name), value_size = sizeof(value)) {
if (value_size == sizeof(value) && value == 0) {
if (NULL == *reg_data) {
*reg_data = loader_instance_heap_alloc(inst, *reg_data_size, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
@@ -960,7 +966,7 @@ VkResult loaderGetRegistryFiles(const struct loader_instance *inst, char *locati
if (!found_gpu) {
loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
- "Dropping driver %s as no corresponduing DXGI adapter was found", name);
+ "Dropping driver %s as no corresponding DXGI adapter was found", name);
continue;
}
}
@@ -972,7 +978,7 @@ VkResult loaderGetRegistryFiles(const struct loader_instance *inst, char *locati
found = true;
} else {
// At this point the reg_data variable contains other JSON paths, likely from the PNP/device section
- // of the registry that we want to have precendence over this non-device specific section of the registry.
+ // of the registry that we want to have precedence over this non-device specific section of the registry.
// To make sure we avoid enumerating old JSON files/drivers that might be present in the non-device specific
// area of the registry when a newer device specific JSON file is present, do a check before adding.
// Find the file name, without path, of the JSON file found in the non-device specific registry location.
@@ -999,8 +1005,6 @@ VkResult loaderGetRegistryFiles(const struct loader_instance *inst, char *locati
}
}
}
- name_size = sizeof(name);
- value_size = sizeof(value);
}
RegCloseKey(key);
}
@@ -2045,7 +2049,7 @@ out:
return res;
}
-struct loader_icd_term *loader_get_icd_and_device(const VkDevice device, struct loader_device **found_dev, uint32_t *icd_index) {
+struct loader_icd_term *loader_get_icd_and_device(const void *device, struct loader_device **found_dev, uint32_t *icd_index) {
*found_dev = NULL;
for (struct loader_instance *inst = loader.instances; inst; inst = inst->next) {
uint32_t index = 0;
@@ -2423,7 +2427,7 @@ static void loader_debug_init(void) {
}
void loader_initialize(void) {
- // initialize mutexs
+ // initialize mutexes
loader_platform_thread_create_mutex(&loader_lock);
loader_platform_thread_create_mutex(&loader_json_lock);
@@ -2450,7 +2454,7 @@ struct loader_data_files {
};
void loader_release() {
- // release mutexs
+ // release mutexes
loader_platform_thread_delete_mutex(&loader_lock);
loader_platform_thread_delete_mutex(&loader_json_lock);
}
@@ -2578,60 +2582,6 @@ out:
return res;
}
-const char *std_validation_str = "VK_LAYER_LUNARG_standard_validation";
-
-// Adds the legacy VK_LAYER_LUNARG_standard_validation as a meta-layer if it
-// fails to find it in the list already. This is usually an indication that a
-// newer loader is being used with an older layer set.
-static bool loaderAddLegacyStandardValidationLayer(const struct loader_instance *inst,
- struct loader_layer_list *layer_instance_list) {
- uint32_t i;
- bool success = true;
- struct loader_layer_properties *props = loaderGetNextLayerPropertySlot(inst, layer_instance_list);
- const char std_validation_names[6][VK_MAX_EXTENSION_NAME_SIZE] = {
- "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation", "VK_LAYER_LUNARG_object_tracker",
- "VK_LAYER_LUNARG_core_validation", "VK_LAYER_GOOGLE_unique_objects"};
- uint32_t layer_count = sizeof(std_validation_names) / sizeof(std_validation_names[0]);
-
- loader_log(inst, VK_DEBUG_REPORT_DEBUG_BIT_EXT, 0,
- "Adding VK_LAYER_LUNARG_standard_validation using the loader legacy path. This is"
- " not an error.");
-
- if (NULL == props) {
- goto out;
- }
-
- memset(props, 0, sizeof(struct loader_layer_properties));
- props->type_flags = VK_LAYER_TYPE_FLAG_INSTANCE_LAYER | VK_LAYER_TYPE_FLAG_EXPLICIT_LAYER | VK_LAYER_TYPE_FLAG_META_LAYER;
- strncpy(props->info.description, "LunarG Standard Validation Layer", sizeof(props->info.description));
- props->info.implementationVersion = 1;
- strncpy(props->info.layerName, std_validation_str, sizeof(props->info.layerName));
- props->info.specVersion = VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION);
-
- props->component_layer_names =
- loader_instance_heap_alloc(inst, sizeof(char[MAX_STRING_SIZE]) * layer_count, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
- if (NULL == props->component_layer_names) {
- loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
- "Failed to allocate space for legacy VK_LAYER_LUNARG_standard_validation"
- " meta-layer component_layers information.");
- success = false;
- goto out;
- }
- for (i = 0; i < layer_count; i++) {
- strncpy(props->component_layer_names[i], std_validation_names[i], MAX_STRING_SIZE - 1);
- props->component_layer_names[i][MAX_STRING_SIZE - 1] = '\0';
- }
-
-out:
-
- if (!success && NULL != props && NULL != props->component_layer_names) {
- loader_instance_heap_free(inst, props->component_layer_names);
- props->component_layer_names = NULL;
- }
-
- return success;
-}
-
// Verify that all component layers in a meta-layer are valid.
static bool verifyMetaLayerComponentLayers(const struct loader_instance *inst, struct loader_layer_properties *prop,
struct loader_layer_list *instance_layers) {
@@ -3201,7 +3151,7 @@ static VkResult loaderReadLayerJson(const struct loader_instance *inst, struct l
if (version.major > 1 || version.minor >= 1) {
loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
"Layer \"%s\" using deprecated \'vkGetInstanceProcAddr\' tag which was deprecated starting with JSON "
- "file version 1.1.0. The new vkNegotiateLayerInterfaceVersion function is preferred, though for "
+ "file version 1.1.0. The new vkNegotiateLoaderLayerInterfaceVersion function is preferred, though for "
"compatibility reasons it may be desirable to continue using the deprecated tag.",
name);
}
@@ -3212,7 +3162,7 @@ static VkResult loaderReadLayerJson(const struct loader_instance *inst, struct l
if (version.major > 1 || version.minor >= 1) {
loader_log(inst, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, 0,
"Layer \"%s\" using deprecated \'vkGetDeviceProcAddr\' tag which was deprecated starting with JSON "
- "file version 1.1.0. The new vkNegotiateLayerInterfaceVersion function is preferred, though for "
+ "file version 1.1.0. The new vkNegotiateLoaderLayerInterfaceVersion function is preferred, though for "
"compatibility reasons it may be desirable to continue using the deprecated tag.",
name);
}
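
The corrected messages name vkNegotiateLoaderLayerInterfaceVersion, the single entry point through which a layer reports the interface version it speaks and hands the loader its proc-address hooks, superseding the per-function JSON tags. A hedged layer-side sketch (the my_layer_* functions are placeholders):

    VKAPI_ATTR VkResult VKAPI_CALL
    vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct) {
        if (pVersionStruct->loaderLayerInterfaceVersion < 2)
            return VK_ERROR_INITIALIZATION_FAILED;        // loader too old for this layer
        pVersionStruct->loaderLayerInterfaceVersion = 2;  // version this layer implements
        pVersionStruct->pfnGetInstanceProcAddr = my_layer_GetInstanceProcAddr;
        pVersionStruct->pfnGetDeviceProcAddr = my_layer_GetDeviceProcAddr;
        pVersionStruct->pfnGetPhysicalDeviceProcAddr = NULL;  // optional hook
        return VK_SUCCESS;
    }
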
@@ -3949,7 +3899,7 @@ out:
}
#ifdef _WIN32
-// Read manifest JSON files uing the Windows driver interface
+// Read manifest JSON files using the Windows driver interface
static VkResult ReadManifestsFromD3DAdapters(const struct loader_instance *inst, char **reg_data, PDWORD reg_data_size,
const wchar_t *value_name) {
VkResult result = VK_INCOMPLETE;
@@ -3994,7 +3944,7 @@ static VkResult ReadManifestsFromD3DAdapters(const struct loader_instance *inst,
.value_type = REG_MULTI_SZ,
.physical_adapter_index = 0,
};
- wcsncpy(filename_info.value_name, value_name, sizeof(filename_info.value_name) / sizeof(DWORD));
+ wcsncpy(filename_info.value_name, value_name, sizeof(filename_info.value_name) / sizeof(WCHAR));
LoaderQueryAdapterInfo query_info = {
.handle = adapters.adapters[i].handle,
.type = LOADER_QUERY_TYPE_REGISTRY,
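
The WCHAR fix above corrects an element-count bug: wcsncpy counts in wide characters, and dividing the buffer's byte size by sizeof(DWORD) (4 bytes) instead of sizeof(WCHAR) (2 bytes on Windows) silently halved the usable length. The idiom that cannot drift is to derive the count from the destination's own element type (portable sketch using wchar_t):

    #include <wchar.h>

    static void copy_name(wchar_t dst[64], const wchar_t *src) {
        const size_t count = 64;   // sizeof(arr)/sizeof(arr[0]) when a real array is in scope
        wcsncpy(dst, src, count - 1);
        dst[count - 1] = L'\0';    // wcsncpy does not guarantee termination
    }
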
@@ -4603,23 +4553,6 @@ void loaderScanForLayers(struct loader_instance *inst, struct loader_layer_list
}
}
- // See if "VK_LAYER_LUNARG_standard_validation" already in list.
- bool found_std_val = false;
- for (uint32_t i = 0; i < instance_layers->count; i++) {
- struct loader_layer_properties *props = &instance_layers->list[i];
- if (strcmp(props->info.layerName, std_validation_str) == 0) {
- found_std_val = true;
- break;
- }
- }
-
- // If we didn't find the VK_LAYER_LUNARG_standard_validation meta-layer in
- // the list, then we need to add it manually. This is likely because we're
- // dealing with a new loader, but an old layer folder.
- if (!found_std_val && !loaderAddLegacyStandardValidationLayer(inst, instance_layers)) {
- goto out;
- }
-
// Verify any meta-layers in the list are valid and all the component layers are
// actually present in the available layer list
VerifyAllMetaLayers(inst, instance_layers, &override_layer_valid);
@@ -4853,6 +4786,32 @@ static VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL loader_gpa_instance_internal(VkI
return (PFN_vkVoidFunction)terminator_CreateDevice;
}
+ // The VK_EXT_debug_utils functions need a special case here so the terminators can still be found from vkGetInstanceProcAddr
+ if (!strcmp(pName, "vkSetDebugUtilsObjectNameEXT")) {
+ return (PFN_vkVoidFunction)terminator_SetDebugUtilsObjectNameEXT;
+ }
+ if (!strcmp(pName, "vkSetDebugUtilsObjectTagEXT")) {
+ return (PFN_vkVoidFunction)terminator_SetDebugUtilsObjectTagEXT;
+ }
+ if (!strcmp(pName, "vkQueueBeginDebugUtilsLabelEXT")) {
+ return (PFN_vkVoidFunction)terminator_QueueBeginDebugUtilsLabelEXT;
+ }
+ if (!strcmp(pName, "vkQueueEndDebugUtilsLabelEXT")) {
+ return (PFN_vkVoidFunction)terminator_QueueEndDebugUtilsLabelEXT;
+ }
+ if (!strcmp(pName, "vkQueueInsertDebugUtilsLabelEXT")) {
+ return (PFN_vkVoidFunction)terminator_QueueInsertDebugUtilsLabelEXT;
+ }
+ if (!strcmp(pName, "vkCmdBeginDebugUtilsLabelEXT")) {
+ return (PFN_vkVoidFunction)terminator_CmdBeginDebugUtilsLabelEXT;
+ }
+ if (!strcmp(pName, "vkCmdEndDebugUtilsLabelEXT")) {
+ return (PFN_vkVoidFunction)terminator_CmdEndDebugUtilsLabelEXT;
+ }
+ if (!strcmp(pName, "vkCmdInsertDebugUtilsLabelEXT")) {
+ return (PFN_vkVoidFunction)terminator_CmdInsertDebugUtilsLabelEXT;
+ }
+
// inst is not wrapped
if (inst == VK_NULL_HANDLE) {
return NULL;
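
The if-chain above grows linearly with the extension; a table-driven lookup is an equivalent alternative when such a list gets long. A sketch of that alternative (the loader itself uses the explicit chain; terminator names are the ones added above):

    #include <string.h>

    struct name_to_pfn { const char *name; PFN_vkVoidFunction pfn; };

    static const struct name_to_pfn debug_utils_terminators[] = {
        {"vkSetDebugUtilsObjectNameEXT", (PFN_vkVoidFunction)terminator_SetDebugUtilsObjectNameEXT},
        {"vkSetDebugUtilsObjectTagEXT", (PFN_vkVoidFunction)terminator_SetDebugUtilsObjectTagEXT},
        /* ...the remaining six VK_EXT_debug_utils entry points... */
    };

    static PFN_vkVoidFunction find_debug_utils_terminator(const char *pName) {
        for (size_t i = 0; i < sizeof(debug_utils_terminators) / sizeof(debug_utils_terminators[0]); ++i)
            if (strcmp(pName, debug_utils_terminators[i].name) == 0)
                return debug_utils_terminators[i].pfn;
        return NULL;
    }
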
@@ -5510,8 +5469,6 @@ VKAPI_ATTR VkResult VKAPI_CALL loader_layer_create_device(VkInstance instance, V
struct loader_device *dev = NULL;
struct loader_instance *inst = NULL;
- assert(pCreateInfo->queueCreateInfoCount >= 1);
-
if (instance != NULL) {
inst = loader_get_instance(instance);
internal_device = physicalDevice;
@@ -6187,7 +6144,7 @@ VkResult loader_validate_device_extensions(struct loader_instance *this_instance
}
// Terminator functions for the Instance chain
-// All named terminator_<Vulakn API name>
+// All named terminator_<Vulkan API name>
VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) {
struct loader_icd_term *icd_term;
@@ -7204,6 +7161,10 @@ VkStringErrorFlags vk_string_validate(const int max_length, const char *utf8) {
int num_char_bytes = 0;
int i, j;
+ if (utf8 == NULL) {
+ return VK_STRING_ERROR_NULL_PTR;
+ }
+
for (i = 0; i <= max_length; i++) {
if (utf8[i] == 0) {
break;
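
The new NULL check lets vk_string_validate report a missing string instead of faulting on the first dereference in this loop. The loop itself classifies UTF-8 bytes; the kind of lead-byte test involved looks roughly like this (an assumption about the elided body, not the loader's code):

    // Sequence length implied by a UTF-8 lead byte; 0 flags a byte that cannot lead.
    static int utf8_lead_len(unsigned char c) {
        if (c < 0x80) return 1;            // 0xxxxxxx: ASCII
        if ((c & 0xE0) == 0xC0) return 2;  // 110xxxxx
        if ((c & 0xF0) == 0xE0) return 3;  // 1110xxxx
        if ((c & 0xF8) == 0xF0) return 4;  // 11110xxx
        return 0;                          // 10xxxxxx or 11111xxx: not a lead byte
    }
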
diff --git a/thirdparty/vulkan/loader/loader.h b/thirdparty/vulkan/loader/loader.h
index 8d6b4c454a..56745a968d 100644
--- a/thirdparty/vulkan/loader/loader.h
+++ b/thirdparty/vulkan/loader/loader.h
@@ -67,6 +67,7 @@ typedef enum VkStringErrorFlagBits {
VK_STRING_ERROR_NONE = 0x00000000,
VK_STRING_ERROR_LENGTH = 0x00000001,
VK_STRING_ERROR_BAD_DATA = 0x00000002,
+ VK_STRING_ERROR_NULL_PTR = 0x00000004,
} VkStringErrorFlagBits;
typedef VkFlags VkStringErrorFlags;
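
Because VkStringErrorFlags is a bitmask, callers test individual bits with a mask rather than comparing for equality. A hypothetical caller-side check:

    #include <stdbool.h>

    static bool layer_name_ok(const char *layer_name) {
        VkStringErrorFlags err = vk_string_validate(VK_MAX_EXTENSION_NAME_SIZE, layer_name);
        return err == VK_STRING_ERROR_NONE;  // any set bit, including the new NULL_PTR, rejects
    }
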
@@ -480,7 +481,7 @@ void loaderScanForImplicitLayers(struct loader_instance *inst, struct loader_lay
bool loaderImplicitLayerIsEnabled(const struct loader_instance *inst, const struct loader_layer_properties *prop);
VkResult loader_get_icd_loader_instance_extensions(const struct loader_instance *inst, struct loader_icd_tramp_list *icd_tramp_list,
struct loader_extension_list *inst_exts);
-struct loader_icd_term *loader_get_icd_and_device(const VkDevice device, struct loader_device **found_dev, uint32_t *icd_index);
+struct loader_icd_term *loader_get_icd_and_device(const void *device, struct loader_device **found_dev, uint32_t *icd_index);
void loader_init_dispatch_dev_ext(struct loader_instance *inst, struct loader_device *dev);
void *loader_dev_ext_gpa(struct loader_instance *inst, const char *funcName);
void *loader_get_dev_ext_trampoline(uint32_t index);
diff --git a/thirdparty/vulkan/loader/trampoline.c b/thirdparty/vulkan/loader/trampoline.c
index 52eea968e8..a6a37f0fce 100644
--- a/thirdparty/vulkan/loader/trampoline.c
+++ b/thirdparty/vulkan/loader/trampoline.c
@@ -135,6 +135,12 @@ LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionPropert
}
loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
+ if (layer_lib == NULL) {
+ loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "%s: Unable to load implicit layer library \"%s\"", __FUNCTION__,
+ layers.list[i].lib_name);
+ continue;
+ }
+
libs[lib_count++] = layer_lib;
void *pfn = loader_platform_get_proc_address(layer_lib,
layers.list[i].pre_instance_functions.enumerate_instance_extension_properties);
@@ -223,6 +229,12 @@ LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
}
loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
+ if (layer_lib == NULL) {
+ loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "%s: Unable to load implicit layer library \"%s\"", __FUNCTION__,
+ layers.list[i].lib_name);
+ continue;
+ }
+
libs[lib_count++] = layer_lib;
void *pfn =
loader_platform_get_proc_address(layer_lib, layers.list[i].pre_instance_functions.enumerate_instance_layer_properties);
@@ -311,6 +323,12 @@ LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t
}
loader_platform_dl_handle layer_lib = loader_platform_open_library(layers.list[i].lib_name);
+ if (layer_lib == NULL) {
+ loader_log(NULL, VK_DEBUG_REPORT_WARNING_BIT_EXT, 0, "%s: Unable to load implicit layer library \"%s\"", __FUNCTION__,
+ layers.list[i].lib_name);
+ continue;
+ }
+
libs[lib_count++] = layer_lib;
void *pfn = loader_platform_get_proc_address(layer_lib,
layers.list[i].pre_instance_functions.enumerate_instance_version);
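
All three hunks add the same guard: loader_platform_open_library returns NULL when the shared library cannot be loaded, and the old code passed that NULL straight to loader_platform_get_proc_address. The guard on its own, as a portable sketch assuming the platform wrapper is a thin layer over dlopen (as it is on unix):

    #include <dlfcn.h>
    #include <stdio.h>

    // Returns a handle or NULL; on NULL the caller skips the layer and keeps
    // enumerating, mirroring the continue paths added above.
    static void *open_layer_library(const char *lib_name) {
        void *lib = dlopen(lib_name, RTLD_NOW | RTLD_LOCAL);
        if (lib == NULL)
            fprintf(stderr, "Unable to load implicit layer library \"%s\": %s\n",
                    lib_name, dlerror());
        return lib;
    }
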
@@ -2478,3 +2496,98 @@ LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDev
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
disp->UpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
}
+
+// ---- Vulkan core 1.2 trampolines
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->CreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
+ const VkRenderPassBeginInfo* pRenderPassBegin,
+ const VkSubpassBeginInfo* pSubpassBeginInfo)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+ disp->CmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(VkCommandBuffer commandBuffer,
+ const VkSubpassBeginInfo* pSubpassBeginInfo,
+ const VkSubpassEndInfo* pSubpassEndInfo)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+ disp->CmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+ disp->CmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
+ VkBuffer countBuffer, VkDeviceSize countBufferOffset,
+ uint32_t maxDrawCount, uint32_t stride)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+ disp->CmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, VkBuffer countBuffer,
+ VkDeviceSize countBufferOffset, uint32_t maxDrawCount,
+ uint32_t stride)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
+ disp->CmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t* pValue)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->GetSemaphoreCounterValue(device, semaphore, pValue);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo,
+ uint64_t timeout)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->WaitSemaphores(device, pWaitInfo, timeout);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->SignalSemaphore(device, pSignalInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress(VkDevice device,
+ const VkBufferDeviceAddressInfo* pInfo)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->GetBufferDeviceAddress(device, pInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(VkDevice device,
+ const VkBufferDeviceAddressInfo* pInfo)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->GetBufferOpaqueCaptureAddress(device, pInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device,
+ const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->GetDeviceMemoryOpaqueCaptureAddress(device, pInfo);
+}
+
+LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount)
+{
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ disp->ResetQueryPool(device, queryPool, firstQuery, queryCount);
+}
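
Each new trampoline is a one-line forward: recover the dispatch table from the handle, then call the matching slot. From the application side the exports behave like any other core function; a hypothetical use of two of them, assuming sem is a timeline semaphore (VK_SEMAPHORE_TYPE_TIMELINE) on a Vulkan 1.2 device:

    static void wait_for_value(VkDevice device, VkSemaphore sem, uint64_t target) {
        const VkSemaphoreWaitInfo wait_info = {
            .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
            .semaphoreCount = 1,
            .pSemaphores = &sem,
            .pValues = &target,
        };
        if (vkWaitSemaphores(device, &wait_info, UINT64_MAX) == VK_SUCCESS) {
            uint64_t current = 0;
            vkGetSemaphoreCounterValue(device, sem, &current);  // current >= target here
        }
    }
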
diff --git a/thirdparty/vulkan/loader/unknown_ext_chain_gas.S b/thirdparty/vulkan/loader/unknown_ext_chain_gas.S
deleted file mode 100644
index f847e1407d..0000000000
--- a/thirdparty/vulkan/loader/unknown_ext_chain_gas.S
+++ /dev/null
@@ -1,885 +0,0 @@
-#
-# Copyright (c) 2017 The Khronos Group Inc.
-# Copyright (c) 2017 Valve Corporation
-# Copyright (c) 2017 LunarG, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Author: Lenny Komow <lenny@lunarg.com>
-#
-
-# This code is used to pass on device (including physical device) extensions through the call chain. It must do this without
-# creating a stack frame, because the actual parameters of the call are not known. Since the first parameter is known to be a
-# VkPhysicalDevice or a dispatchable object it can unwrap the object, possibly overwriting the wrapped physical device, and then
-# jump to the next function in the call chain
-
-#ifdef HAVE_CET_H
-#include <cet.h>
-#else
-#define _CET_ENDBR
-#endif
-
-.intel_syntax noprefix
-.include "gen_defines.asm"
-
-.ifdef X86_64
-
-.macro PhysDevExtTramp num
-.global vkPhysDevExtTramp\num
-vkPhysDevExtTramp\num:
- _CET_ENDBR
- mov rax, [rdi]
- mov rdi, [rdi + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP]
- jmp [rax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * \num))]
-.endm
-
-.macro PhysDevExtTermin num
-.global vkPhysDevExtTermin\num
-vkPhysDevExtTermin\num:
- _CET_ENDBR
- mov rax, [rdi + ICD_TERM_OFFSET_PHYS_DEV_TERM] # Store the loader_icd_term* in rax
- cmp qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))], 0 # Check if the next function in the chain is NULL
- je terminError\num # Go to the error section if it is NULL
- mov rdi, [rdi + PHYS_DEV_OFFSET_PHYS_DEV_TERM] # Load the unwrapped VkPhysicalDevice into the first arg
- jmp [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))] # Jump to the next function in the chain
-terminError\num:
- sub rsp, 56 # Create the stack frame
- mov rdi, [rax + INSTANCE_OFFSET_ICD_TERM] # Load the loader_instance into rdi (first arg)
- mov r8, [rdi + (HASH_OFFSET_INSTANCE + (HASH_SIZE * \num) + FUNC_NAME_OFFSET_HASH)] # Load the func name into r8 (fifth arg)
- lea rcx, termin_error_string@GOTPCREL # Load the error string into rcx (fourth arg)
- xor edx, edx # Set rdx to zero (third arg)
- lea esi, [rdx + VK_DEBUG_REPORT_ERROR_BIT_EXT] # Write the error logging bit to rsi (second arg)
- call loader_log # Log the error message before we crash
- add rsp, 56 # Clean up the stack frame
- mov rax, 0
- jmp rax # Crash intentionally by jumping to address zero
-.endm
-
-.macro DevExtTramp num
-.global vkdev_ext\num
-vkdev_ext\num:
- _CET_ENDBR
- mov rax, [rdi] # Dereference the handle to get the dispatch table
- jmp [rax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * \num))] # Jump to the appropriate call chain
-.endm
-
-.else
-
-.macro PhysDevExtTramp num
-.global vkPhysDevExtTramp\num
-vkPhysDevExtTramp\num:
- _CET_ENDBR
- mov eax, [esp + 4] # Load the wrapped VkPhysicalDevice into eax
- mov ecx, [eax + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP] # Load the unwrapped VkPhysicalDevice into ecx
- mov [esp + 4], ecx # Overwrite the wrapped VkPhysicalDevice with the unwrapped one (on the stack)
- mov eax, [eax] # Dereference the wrapped VkPhysicalDevice to get the dispatch table in eax
- jmp [eax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * \num))] # Jump to the next function in the chain, preserving the args on the stack
-.endm
-
-.macro PhysDevExtTermin num
-.global vkPhysDevExtTermin\num
-vkPhysDevExtTermin\num:
- _CET_ENDBR
- mov ecx, [esp + 4] # Move the wrapped VkPhysicalDevice into ecx
- mov eax, [ecx + ICD_TERM_OFFSET_PHYS_DEV_TERM] # Store the loader_icd_term* in eax
- cmp dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))], 0 # Check if the next function in the chain is NULL
- je terminError\num # Go to the error section if it is NULL
- mov ecx, [ecx + PHYS_DEV_OFFSET_PHYS_DEV_TERM] # Unwrap the VkPhysicalDevice in ecx
- mov [esp + 4], ecx # Copy the unwrapped VkPhysicalDevice into the first arg
- jmp [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * \num))] # Jump to the next function in the chain
-terminError\num:
- mov eax, [eax + INSTANCE_OFFSET_ICD_TERM] # Load the loader_instance into eax
- push [eax + (HASH_OFFSET_INSTANCE + (HASH_SIZE * \num) + FUNC_NAME_OFFSET_HASH)] # Push the func name (fifth arg)
- push offset termin_error_string@GOT # Push the error string (fourth arg)
- push 0 # Push zero (third arg)
- push VK_DEBUG_REPORT_ERROR_BIT_EXT # Push the error logging bit (second arg)
- push eax # Push the loader_instance (first arg)
- call loader_log # Log the error message before we crash
- add esp, 20 # Clean up the args
- mov eax, 0
- jmp eax # Crash intentionally by jumping to address zero
-.endm
-
-.macro DevExtTramp num
-.global vkdev_ext\num
-vkdev_ext\num:
- _CET_ENDBR
- mov eax, [esp + 4] # Dereference the handle to get the dispatch table
- jmp [eax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * \num))] # Jump to the appropriate call chain
-.endm
-
-.endif
-
-#if defined(__ELF__)
-.section .note.GNU-stack,"",%progbits
-#endif
-
-.data
-
-termin_error_string:
-.string "Extension %s not supported for this physical device"
-
-.text
-
- PhysDevExtTramp 0
- PhysDevExtTramp 1
- PhysDevExtTramp 2
- PhysDevExtTramp 3
- PhysDevExtTramp 4
- PhysDevExtTramp 5
- PhysDevExtTramp 6
- PhysDevExtTramp 7
- PhysDevExtTramp 8
- PhysDevExtTramp 9
- PhysDevExtTramp 10
- PhysDevExtTramp 11
- PhysDevExtTramp 12
- PhysDevExtTramp 13
- PhysDevExtTramp 14
- PhysDevExtTramp 15
- PhysDevExtTramp 16
- PhysDevExtTramp 17
- PhysDevExtTramp 18
- PhysDevExtTramp 19
- PhysDevExtTramp 20
- PhysDevExtTramp 21
- PhysDevExtTramp 22
- PhysDevExtTramp 23
- PhysDevExtTramp 24
- PhysDevExtTramp 25
- PhysDevExtTramp 26
- PhysDevExtTramp 27
- PhysDevExtTramp 28
- PhysDevExtTramp 29
- PhysDevExtTramp 30
- PhysDevExtTramp 31
- PhysDevExtTramp 32
- PhysDevExtTramp 33
- PhysDevExtTramp 34
- PhysDevExtTramp 35
- PhysDevExtTramp 36
- PhysDevExtTramp 37
- PhysDevExtTramp 38
- PhysDevExtTramp 39
- PhysDevExtTramp 40
- PhysDevExtTramp 41
- PhysDevExtTramp 42
- PhysDevExtTramp 43
- PhysDevExtTramp 44
- PhysDevExtTramp 45
- PhysDevExtTramp 46
- PhysDevExtTramp 47
- PhysDevExtTramp 48
- PhysDevExtTramp 49
- PhysDevExtTramp 50
- PhysDevExtTramp 51
- PhysDevExtTramp 52
- PhysDevExtTramp 53
- PhysDevExtTramp 54
- PhysDevExtTramp 55
- PhysDevExtTramp 56
- PhysDevExtTramp 57
- PhysDevExtTramp 58
- PhysDevExtTramp 59
- PhysDevExtTramp 60
- PhysDevExtTramp 61
- PhysDevExtTramp 62
- PhysDevExtTramp 63
- PhysDevExtTramp 64
- PhysDevExtTramp 65
- PhysDevExtTramp 66
- PhysDevExtTramp 67
- PhysDevExtTramp 68
- PhysDevExtTramp 69
- PhysDevExtTramp 70
- PhysDevExtTramp 71
- PhysDevExtTramp 72
- PhysDevExtTramp 73
- PhysDevExtTramp 74
- PhysDevExtTramp 75
- PhysDevExtTramp 76
- PhysDevExtTramp 77
- PhysDevExtTramp 78
- PhysDevExtTramp 79
- PhysDevExtTramp 80
- PhysDevExtTramp 81
- PhysDevExtTramp 82
- PhysDevExtTramp 83
- PhysDevExtTramp 84
- PhysDevExtTramp 85
- PhysDevExtTramp 86
- PhysDevExtTramp 87
- PhysDevExtTramp 88
- PhysDevExtTramp 89
- PhysDevExtTramp 90
- PhysDevExtTramp 91
- PhysDevExtTramp 92
- PhysDevExtTramp 93
- PhysDevExtTramp 94
- PhysDevExtTramp 95
- PhysDevExtTramp 96
- PhysDevExtTramp 97
- PhysDevExtTramp 98
- PhysDevExtTramp 99
- PhysDevExtTramp 100
- PhysDevExtTramp 101
- PhysDevExtTramp 102
- PhysDevExtTramp 103
- PhysDevExtTramp 104
- PhysDevExtTramp 105
- PhysDevExtTramp 106
- PhysDevExtTramp 107
- PhysDevExtTramp 108
- PhysDevExtTramp 109
- PhysDevExtTramp 110
- PhysDevExtTramp 111
- PhysDevExtTramp 112
- PhysDevExtTramp 113
- PhysDevExtTramp 114
- PhysDevExtTramp 115
- PhysDevExtTramp 116
- PhysDevExtTramp 117
- PhysDevExtTramp 118
- PhysDevExtTramp 119
- PhysDevExtTramp 120
- PhysDevExtTramp 121
- PhysDevExtTramp 122
- PhysDevExtTramp 123
- PhysDevExtTramp 124
- PhysDevExtTramp 125
- PhysDevExtTramp 126
- PhysDevExtTramp 127
- PhysDevExtTramp 128
- PhysDevExtTramp 129
- PhysDevExtTramp 130
- PhysDevExtTramp 131
- PhysDevExtTramp 132
- PhysDevExtTramp 133
- PhysDevExtTramp 134
- PhysDevExtTramp 135
- PhysDevExtTramp 136
- PhysDevExtTramp 137
- PhysDevExtTramp 138
- PhysDevExtTramp 139
- PhysDevExtTramp 140
- PhysDevExtTramp 141
- PhysDevExtTramp 142
- PhysDevExtTramp 143
- PhysDevExtTramp 144
- PhysDevExtTramp 145
- PhysDevExtTramp 146
- PhysDevExtTramp 147
- PhysDevExtTramp 148
- PhysDevExtTramp 149
- PhysDevExtTramp 150
- PhysDevExtTramp 151
- PhysDevExtTramp 152
- PhysDevExtTramp 153
- PhysDevExtTramp 154
- PhysDevExtTramp 155
- PhysDevExtTramp 156
- PhysDevExtTramp 157
- PhysDevExtTramp 158
- PhysDevExtTramp 159
- PhysDevExtTramp 160
- PhysDevExtTramp 161
- PhysDevExtTramp 162
- PhysDevExtTramp 163
- PhysDevExtTramp 164
- PhysDevExtTramp 165
- PhysDevExtTramp 166
- PhysDevExtTramp 167
- PhysDevExtTramp 168
- PhysDevExtTramp 169
- PhysDevExtTramp 170
- PhysDevExtTramp 171
- PhysDevExtTramp 172
- PhysDevExtTramp 173
- PhysDevExtTramp 174
- PhysDevExtTramp 175
- PhysDevExtTramp 176
- PhysDevExtTramp 177
- PhysDevExtTramp 178
- PhysDevExtTramp 179
- PhysDevExtTramp 180
- PhysDevExtTramp 181
- PhysDevExtTramp 182
- PhysDevExtTramp 183
- PhysDevExtTramp 184
- PhysDevExtTramp 185
- PhysDevExtTramp 186
- PhysDevExtTramp 187
- PhysDevExtTramp 188
- PhysDevExtTramp 189
- PhysDevExtTramp 190
- PhysDevExtTramp 191
- PhysDevExtTramp 192
- PhysDevExtTramp 193
- PhysDevExtTramp 194
- PhysDevExtTramp 195
- PhysDevExtTramp 196
- PhysDevExtTramp 197
- PhysDevExtTramp 198
- PhysDevExtTramp 199
- PhysDevExtTramp 200
- PhysDevExtTramp 201
- PhysDevExtTramp 202
- PhysDevExtTramp 203
- PhysDevExtTramp 204
- PhysDevExtTramp 205
- PhysDevExtTramp 206
- PhysDevExtTramp 207
- PhysDevExtTramp 208
- PhysDevExtTramp 209
- PhysDevExtTramp 210
- PhysDevExtTramp 211
- PhysDevExtTramp 212
- PhysDevExtTramp 213
- PhysDevExtTramp 214
- PhysDevExtTramp 215
- PhysDevExtTramp 216
- PhysDevExtTramp 217
- PhysDevExtTramp 218
- PhysDevExtTramp 219
- PhysDevExtTramp 220
- PhysDevExtTramp 221
- PhysDevExtTramp 222
- PhysDevExtTramp 223
- PhysDevExtTramp 224
- PhysDevExtTramp 225
- PhysDevExtTramp 226
- PhysDevExtTramp 227
- PhysDevExtTramp 228
- PhysDevExtTramp 229
- PhysDevExtTramp 230
- PhysDevExtTramp 231
- PhysDevExtTramp 232
- PhysDevExtTramp 233
- PhysDevExtTramp 234
- PhysDevExtTramp 235
- PhysDevExtTramp 236
- PhysDevExtTramp 237
- PhysDevExtTramp 238
- PhysDevExtTramp 239
- PhysDevExtTramp 240
- PhysDevExtTramp 241
- PhysDevExtTramp 242
- PhysDevExtTramp 243
- PhysDevExtTramp 244
- PhysDevExtTramp 245
- PhysDevExtTramp 246
- PhysDevExtTramp 247
- PhysDevExtTramp 248
- PhysDevExtTramp 249
-
- PhysDevExtTermin 0
- PhysDevExtTermin 1
- PhysDevExtTermin 2
- PhysDevExtTermin 3
- PhysDevExtTermin 4
- PhysDevExtTermin 5
- PhysDevExtTermin 6
- PhysDevExtTermin 7
- PhysDevExtTermin 8
- PhysDevExtTermin 9
- PhysDevExtTermin 10
- PhysDevExtTermin 11
- PhysDevExtTermin 12
- PhysDevExtTermin 13
- PhysDevExtTermin 14
- PhysDevExtTermin 15
- PhysDevExtTermin 16
- PhysDevExtTermin 17
- PhysDevExtTermin 18
- PhysDevExtTermin 19
- PhysDevExtTermin 20
- PhysDevExtTermin 21
- PhysDevExtTermin 22
- PhysDevExtTermin 23
- PhysDevExtTermin 24
- PhysDevExtTermin 25
- PhysDevExtTermin 26
- PhysDevExtTermin 27
- PhysDevExtTermin 28
- PhysDevExtTermin 29
- PhysDevExtTermin 30
- PhysDevExtTermin 31
- PhysDevExtTermin 32
- PhysDevExtTermin 33
- PhysDevExtTermin 34
- PhysDevExtTermin 35
- PhysDevExtTermin 36
- PhysDevExtTermin 37
- PhysDevExtTermin 38
- PhysDevExtTermin 39
- PhysDevExtTermin 40
- PhysDevExtTermin 41
- PhysDevExtTermin 42
- PhysDevExtTermin 43
- PhysDevExtTermin 44
- PhysDevExtTermin 45
- PhysDevExtTermin 46
- PhysDevExtTermin 47
- PhysDevExtTermin 48
- PhysDevExtTermin 49
- PhysDevExtTermin 50
- PhysDevExtTermin 51
- PhysDevExtTermin 52
- PhysDevExtTermin 53
- PhysDevExtTermin 54
- PhysDevExtTermin 55
- PhysDevExtTermin 56
- PhysDevExtTermin 57
- PhysDevExtTermin 58
- PhysDevExtTermin 59
- PhysDevExtTermin 60
- PhysDevExtTermin 61
- PhysDevExtTermin 62
- PhysDevExtTermin 63
- PhysDevExtTermin 64
- PhysDevExtTermin 65
- PhysDevExtTermin 66
- PhysDevExtTermin 67
- PhysDevExtTermin 68
- PhysDevExtTermin 69
- PhysDevExtTermin 70
- PhysDevExtTermin 71
- PhysDevExtTermin 72
- PhysDevExtTermin 73
- PhysDevExtTermin 74
- PhysDevExtTermin 75
- PhysDevExtTermin 76
- PhysDevExtTermin 77
- PhysDevExtTermin 78
- PhysDevExtTermin 79
- PhysDevExtTermin 80
- PhysDevExtTermin 81
- PhysDevExtTermin 82
- PhysDevExtTermin 83
- PhysDevExtTermin 84
- PhysDevExtTermin 85
- PhysDevExtTermin 86
- PhysDevExtTermin 87
- PhysDevExtTermin 88
- PhysDevExtTermin 89
- PhysDevExtTermin 90
- PhysDevExtTermin 91
- PhysDevExtTermin 92
- PhysDevExtTermin 93
- PhysDevExtTermin 94
- PhysDevExtTermin 95
- PhysDevExtTermin 96
- PhysDevExtTermin 97
- PhysDevExtTermin 98
- PhysDevExtTermin 99
- PhysDevExtTermin 100
- PhysDevExtTermin 101
- PhysDevExtTermin 102
- PhysDevExtTermin 103
- PhysDevExtTermin 104
- PhysDevExtTermin 105
- PhysDevExtTermin 106
- PhysDevExtTermin 107
- PhysDevExtTermin 108
- PhysDevExtTermin 109
- PhysDevExtTermin 110
- PhysDevExtTermin 111
- PhysDevExtTermin 112
- PhysDevExtTermin 113
- PhysDevExtTermin 114
- PhysDevExtTermin 115
- PhysDevExtTermin 116
- PhysDevExtTermin 117
- PhysDevExtTermin 118
- PhysDevExtTermin 119
- PhysDevExtTermin 120
- PhysDevExtTermin 121
- PhysDevExtTermin 122
- PhysDevExtTermin 123
- PhysDevExtTermin 124
- PhysDevExtTermin 125
- PhysDevExtTermin 126
- PhysDevExtTermin 127
- PhysDevExtTermin 128
- PhysDevExtTermin 129
- PhysDevExtTermin 130
- PhysDevExtTermin 131
- PhysDevExtTermin 132
- PhysDevExtTermin 133
- PhysDevExtTermin 134
- PhysDevExtTermin 135
- PhysDevExtTermin 136
- PhysDevExtTermin 137
- PhysDevExtTermin 138
- PhysDevExtTermin 139
- PhysDevExtTermin 140
- PhysDevExtTermin 141
- PhysDevExtTermin 142
- PhysDevExtTermin 143
- PhysDevExtTermin 144
- PhysDevExtTermin 145
- PhysDevExtTermin 146
- PhysDevExtTermin 147
- PhysDevExtTermin 148
- PhysDevExtTermin 149
- PhysDevExtTermin 150
- PhysDevExtTermin 151
- PhysDevExtTermin 152
- PhysDevExtTermin 153
- PhysDevExtTermin 154
- PhysDevExtTermin 155
- PhysDevExtTermin 156
- PhysDevExtTermin 157
- PhysDevExtTermin 158
- PhysDevExtTermin 159
- PhysDevExtTermin 160
- PhysDevExtTermin 161
- PhysDevExtTermin 162
- PhysDevExtTermin 163
- PhysDevExtTermin 164
- PhysDevExtTermin 165
- PhysDevExtTermin 166
- PhysDevExtTermin 167
- PhysDevExtTermin 168
- PhysDevExtTermin 169
- PhysDevExtTermin 170
- PhysDevExtTermin 171
- PhysDevExtTermin 172
- PhysDevExtTermin 173
- PhysDevExtTermin 174
- PhysDevExtTermin 175
- PhysDevExtTermin 176
- PhysDevExtTermin 177
- PhysDevExtTermin 178
- PhysDevExtTermin 179
- PhysDevExtTermin 180
- PhysDevExtTermin 181
- PhysDevExtTermin 182
- PhysDevExtTermin 183
- PhysDevExtTermin 184
- PhysDevExtTermin 185
- PhysDevExtTermin 186
- PhysDevExtTermin 187
- PhysDevExtTermin 188
- PhysDevExtTermin 189
- PhysDevExtTermin 190
- PhysDevExtTermin 191
- PhysDevExtTermin 192
- PhysDevExtTermin 193
- PhysDevExtTermin 194
- PhysDevExtTermin 195
- PhysDevExtTermin 196
- PhysDevExtTermin 197
- PhysDevExtTermin 198
- PhysDevExtTermin 199
- PhysDevExtTermin 200
- PhysDevExtTermin 201
- PhysDevExtTermin 202
- PhysDevExtTermin 203
- PhysDevExtTermin 204
- PhysDevExtTermin 205
- PhysDevExtTermin 206
- PhysDevExtTermin 207
- PhysDevExtTermin 208
- PhysDevExtTermin 209
- PhysDevExtTermin 210
- PhysDevExtTermin 211
- PhysDevExtTermin 212
- PhysDevExtTermin 213
- PhysDevExtTermin 214
- PhysDevExtTermin 215
- PhysDevExtTermin 216
- PhysDevExtTermin 217
- PhysDevExtTermin 218
- PhysDevExtTermin 219
- PhysDevExtTermin 220
- PhysDevExtTermin 221
- PhysDevExtTermin 222
- PhysDevExtTermin 223
- PhysDevExtTermin 224
- PhysDevExtTermin 225
- PhysDevExtTermin 226
- PhysDevExtTermin 227
- PhysDevExtTermin 228
- PhysDevExtTermin 229
- PhysDevExtTermin 230
- PhysDevExtTermin 231
- PhysDevExtTermin 232
- PhysDevExtTermin 233
- PhysDevExtTermin 234
- PhysDevExtTermin 235
- PhysDevExtTermin 236
- PhysDevExtTermin 237
- PhysDevExtTermin 238
- PhysDevExtTermin 239
- PhysDevExtTermin 240
- PhysDevExtTermin 241
- PhysDevExtTermin 242
- PhysDevExtTermin 243
- PhysDevExtTermin 244
- PhysDevExtTermin 245
- PhysDevExtTermin 246
- PhysDevExtTermin 247
- PhysDevExtTermin 248
- PhysDevExtTermin 249
-
- DevExtTramp 0
- DevExtTramp 1
- DevExtTramp 2
- DevExtTramp 3
- DevExtTramp 4
- DevExtTramp 5
- DevExtTramp 6
- DevExtTramp 7
- DevExtTramp 8
- DevExtTramp 9
- DevExtTramp 10
- DevExtTramp 11
- DevExtTramp 12
- DevExtTramp 13
- DevExtTramp 14
- DevExtTramp 15
- DevExtTramp 16
- DevExtTramp 17
- DevExtTramp 18
- DevExtTramp 19
- DevExtTramp 20
- DevExtTramp 21
- DevExtTramp 22
- DevExtTramp 23
- DevExtTramp 24
- DevExtTramp 25
- DevExtTramp 26
- DevExtTramp 27
- DevExtTramp 28
- DevExtTramp 29
- DevExtTramp 30
- DevExtTramp 31
- DevExtTramp 32
- DevExtTramp 33
- DevExtTramp 34
- DevExtTramp 35
- DevExtTramp 36
- DevExtTramp 37
- DevExtTramp 38
- DevExtTramp 39
- DevExtTramp 40
- DevExtTramp 41
- DevExtTramp 42
- DevExtTramp 43
- DevExtTramp 44
- DevExtTramp 45
- DevExtTramp 46
- DevExtTramp 47
- DevExtTramp 48
- DevExtTramp 49
- DevExtTramp 50
- DevExtTramp 51
- DevExtTramp 52
- DevExtTramp 53
- DevExtTramp 54
- DevExtTramp 55
- DevExtTramp 56
- DevExtTramp 57
- DevExtTramp 58
- DevExtTramp 59
- DevExtTramp 60
- DevExtTramp 61
- DevExtTramp 62
- DevExtTramp 63
- DevExtTramp 64
- DevExtTramp 65
- DevExtTramp 66
- DevExtTramp 67
- DevExtTramp 68
- DevExtTramp 69
- DevExtTramp 70
- DevExtTramp 71
- DevExtTramp 72
- DevExtTramp 73
- DevExtTramp 74
- DevExtTramp 75
- DevExtTramp 76
- DevExtTramp 77
- DevExtTramp 78
- DevExtTramp 79
- DevExtTramp 80
- DevExtTramp 81
- DevExtTramp 82
- DevExtTramp 83
- DevExtTramp 84
- DevExtTramp 85
- DevExtTramp 86
- DevExtTramp 87
- DevExtTramp 88
- DevExtTramp 89
- DevExtTramp 90
- DevExtTramp 91
- DevExtTramp 92
- DevExtTramp 93
- DevExtTramp 94
- DevExtTramp 95
- DevExtTramp 96
- DevExtTramp 97
- DevExtTramp 98
- DevExtTramp 99
- DevExtTramp 100
- DevExtTramp 101
- DevExtTramp 102
- DevExtTramp 103
- DevExtTramp 104
- DevExtTramp 105
- DevExtTramp 106
- DevExtTramp 107
- DevExtTramp 108
- DevExtTramp 109
- DevExtTramp 110
- DevExtTramp 111
- DevExtTramp 112
- DevExtTramp 113
- DevExtTramp 114
- DevExtTramp 115
- DevExtTramp 116
- DevExtTramp 117
- DevExtTramp 118
- DevExtTramp 119
- DevExtTramp 120
- DevExtTramp 121
- DevExtTramp 122
- DevExtTramp 123
- DevExtTramp 124
- DevExtTramp 125
- DevExtTramp 126
- DevExtTramp 127
- DevExtTramp 128
- DevExtTramp 129
- DevExtTramp 130
- DevExtTramp 131
- DevExtTramp 132
- DevExtTramp 133
- DevExtTramp 134
- DevExtTramp 135
- DevExtTramp 136
- DevExtTramp 137
- DevExtTramp 138
- DevExtTramp 139
- DevExtTramp 140
- DevExtTramp 141
- DevExtTramp 142
- DevExtTramp 143
- DevExtTramp 144
- DevExtTramp 145
- DevExtTramp 146
- DevExtTramp 147
- DevExtTramp 148
- DevExtTramp 149
- DevExtTramp 150
- DevExtTramp 151
- DevExtTramp 152
- DevExtTramp 153
- DevExtTramp 154
- DevExtTramp 155
- DevExtTramp 156
- DevExtTramp 157
- DevExtTramp 158
- DevExtTramp 159
- DevExtTramp 160
- DevExtTramp 161
- DevExtTramp 162
- DevExtTramp 163
- DevExtTramp 164
- DevExtTramp 165
- DevExtTramp 166
- DevExtTramp 167
- DevExtTramp 168
- DevExtTramp 169
- DevExtTramp 170
- DevExtTramp 171
- DevExtTramp 172
- DevExtTramp 173
- DevExtTramp 174
- DevExtTramp 175
- DevExtTramp 176
- DevExtTramp 177
- DevExtTramp 178
- DevExtTramp 179
- DevExtTramp 180
- DevExtTramp 181
- DevExtTramp 182
- DevExtTramp 183
- DevExtTramp 184
- DevExtTramp 185
- DevExtTramp 186
- DevExtTramp 187
- DevExtTramp 188
- DevExtTramp 189
- DevExtTramp 190
- DevExtTramp 191
- DevExtTramp 192
- DevExtTramp 193
- DevExtTramp 194
- DevExtTramp 195
- DevExtTramp 196
- DevExtTramp 197
- DevExtTramp 198
- DevExtTramp 199
- DevExtTramp 200
- DevExtTramp 201
- DevExtTramp 202
- DevExtTramp 203
- DevExtTramp 204
- DevExtTramp 205
- DevExtTramp 206
- DevExtTramp 207
- DevExtTramp 208
- DevExtTramp 209
- DevExtTramp 210
- DevExtTramp 211
- DevExtTramp 212
- DevExtTramp 213
- DevExtTramp 214
- DevExtTramp 215
- DevExtTramp 216
- DevExtTramp 217
- DevExtTramp 218
- DevExtTramp 219
- DevExtTramp 220
- DevExtTramp 221
- DevExtTramp 222
- DevExtTramp 223
- DevExtTramp 224
- DevExtTramp 225
- DevExtTramp 226
- DevExtTramp 227
- DevExtTramp 228
- DevExtTramp 229
- DevExtTramp 230
- DevExtTramp 231
- DevExtTramp 232
- DevExtTramp 233
- DevExtTramp 234
- DevExtTramp 235
- DevExtTramp 236
- DevExtTramp 237
- DevExtTramp 238
- DevExtTramp 239
- DevExtTramp 240
- DevExtTramp 241
- DevExtTramp 242
- DevExtTramp 243
- DevExtTramp 244
- DevExtTramp 245
- DevExtTramp 246
- DevExtTramp 247
- DevExtTramp 248
- DevExtTramp 249
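
The file above cannot be written in portable C: the callee's full signature is unknown, so the trampoline must not build a stack frame, and it tail-jumps so every argument register and stack slot reaches the callee untouched. Restricted to the first argument, the x86-64 PhysDevExtTramp logic renders roughly as follows (hypothetical C, assuming the gen_defines.asm offsets were exposed as C macros):

    static void phys_dev_ext_tramp(void *wrapped_phys_dev, int num) {
        char *obj = (char *)wrapped_phys_dev;
        char *dispatch = *(char **)obj;                                      // mov rax, [rdi]
        void *unwrapped = *(void **)(obj + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP);  // mov rdi, [rdi + ...]
        void (*next)(void *) = *(void (**)(void *))(dispatch +
            PHYS_DEV_OFFSET_INST_DISPATCH + PTR_SIZE * num);
        next(unwrapped);  // the assembly jmp's instead, preserving all other args
    }
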
diff --git a/thirdparty/vulkan/loader/unknown_ext_chain_masm.asm b/thirdparty/vulkan/loader/unknown_ext_chain_masm.asm
deleted file mode 100644
index 34bc7c2fc7..0000000000
--- a/thirdparty/vulkan/loader/unknown_ext_chain_masm.asm
+++ /dev/null
@@ -1,883 +0,0 @@
-;
-; Copyright (c) 2017 The Khronos Group Inc.
-; Copyright (c) 2017 Valve Corporation
-; Copyright (c) 2017 LunarG, Inc.
-;
-; Licensed under the Apache License, Version 2.0 (the "License");
-; you may not use this file except in compliance with the License.
-; You may obtain a copy of the License at
-;
-; http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing, software
-; distributed under the License is distributed on an "AS IS" BASIS,
-; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-; See the License for the specific language governing permissions and
-; limitations under the License.
-;
-; Author: Lenny Komow <lenny@lunarg.com>
-;
-
-; This code is used to pass on device (including physical device) extensions through the call chain. It must do this without
-; creating a stack frame, because the actual parameters of the call are not known. Since the first parameter is known to be a
-; VkPhysicalDevice or a dispatchable object it can unwrap the object, possibly overwriting the wrapped physical device, and then
-; jump to the next function in the call chain
-
-; Codegen defines a number of values, chiefly offsets of members within structs and sizes of data types within gen_defines.asm.
-; Struct member offsets are defined in the format "XX_OFFSET_YY" where XX indicates the member within the struct and YY indicates
-; the struct type that it is a member of. Data type sizes are defined in the format "XX_SIZE" where XX indicates the data type.
-INCLUDE gen_defines.asm
-
-; 64-bit values and macro
-IFDEF rax
-
-PhysDevExtTramp macro num:req
-public vkPhysDevExtTramp&num&
-vkPhysDevExtTramp&num&:
- mov rax, qword ptr [rcx] ; Dereference the wrapped VkPhysicalDevice to get the dispatch table in rax
- mov rcx, qword ptr [rcx + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP] ; Load the unwrapped VkPhysicalDevice into rcx
- jmp qword ptr [rax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * num))] ; Jump to the next function in the chain, preserving the args in other registers
-endm
-
-PhysDevExtTermin macro num
-public vkPhysDevExtTermin&num&
-vkPhysDevExtTermin&num&:
- mov rax, qword ptr [rcx + ICD_TERM_OFFSET_PHYS_DEV_TERM] ; Store the loader_icd_term* in rax
- cmp qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))], 0 ; Check if the next function in the chain is NULL
- je terminError&num& ; Go to the error section if it is NULL
- mov rcx, qword ptr [rcx + PHYS_DEV_OFFSET_PHYS_DEV_TERM] ; Load the unwrapped VkPhysicalDevice into the first arg
- jmp qword ptr [rax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))] ; Jump to the next function in the chain
-terminError&num&:
- sub rsp, 56 ; Create the stack frame
- mov rcx, qword ptr [rax + INSTANCE_OFFSET_ICD_TERM] ; Load the loader_instance into rcx (first arg)
- mov rax, qword ptr [rcx + (HASH_OFFSET_INSTANCE + (HASH_SIZE * num) + FUNC_NAME_OFFSET_HASH)] ; Load the func name into rax
- lea r9, termin_error_string ; Load the error string into r9 (fourth arg)
- xor r8d, r8d ; Set r8 to zero (third arg)
- mov qword ptr [rsp + 32], rax ; Move the func name onto the stack (fifth arg)
- lea edx, [r8 + VK_DEBUG_REPORT_ERROR_BIT_EXT] ; Write the error logging bit to rdx (second arg)
- call loader_log ; Log the error message before we crash
- add rsp, 56 ; Clean up the stack frame
- mov rax, 0
- jmp rax ; Crash intentionally by jumping to address zero
-endm
-
-DevExtTramp macro num
-public vkdev_ext&num&
-vkdev_ext&num&:
- mov rax, qword ptr [rcx] ; Dereference the handle to get the dispatch table
- jmp qword ptr [rax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * num))] ; Jump to the appropriate call chain
-endm
-
-; 32-bit values and macro
-ELSE
-
-PhysDevExtTramp macro num
-public _vkPhysDevExtTramp&num&@4
-_vkPhysDevExtTramp&num&@4:
- mov eax, dword ptr [esp + 4] ; Load the wrapped VkPhysicalDevice into eax
- mov ecx, [eax + PHYS_DEV_OFFSET_PHYS_DEV_TRAMP] ; Load the unwrapped VkPhysicalDevice into ecx
- mov [esp + 4], ecx ; Overwrite the wrapped VkPhysicalDevice with the unwrapped one (on the stack)
- mov eax, [eax] ; Dereference the wrapped VkPhysicalDevice to get the dispatch table in eax
- jmp dword ptr [eax + (PHYS_DEV_OFFSET_INST_DISPATCH + (PTR_SIZE * num))] ; Jump to the next function in the chain, preserving the args on the stack
-endm
-
-PhysDevExtTermin macro num
-public _vkPhysDevExtTermin&num&@4
-_vkPhysDevExtTermin&num&@4:
- mov ecx, dword ptr [esp + 4] ; Move the wrapped VkPhysicalDevice into ecx
- mov eax, dword ptr [ecx + ICD_TERM_OFFSET_PHYS_DEV_TERM] ; Store the loader_icd_term* in eax
- cmp dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))], 0 ; Check if the next function in the chain is NULL
- je terminError&num& ; Go to the error section if it is NULL
- mov ecx, dword ptr [ecx + PHYS_DEV_OFFSET_PHYS_DEV_TERM] ; Unwrap the VkPhysicalDevice in ecx
- mov dword ptr [esp + 4], ecx ; Copy the unwrapped VkPhysicalDevice into the first arg
- jmp dword ptr [eax + (DISPATCH_OFFSET_ICD_TERM + (PTR_SIZE * num))] ; Jump to the next function in the chain
-terminError&num&:
- mov eax, dword ptr [eax + INSTANCE_OFFSET_ICD_TERM] ; Load the loader_instance into eax
- push dword ptr [eax + (HASH_OFFSET_INSTANCE + (HASH_SIZE * num) + FUNC_NAME_OFFSET_HASH)] ; Push the func name (fifth arg)
- push offset termin_error_string ; Push the error string (fourth arg)
- push 0 ; Push zero (third arg)
- push VK_DEBUG_REPORT_ERROR_BIT_EXT ; Push the error logging bit (second arg)
- push eax ; Push the loader_instance (first arg)
- call _loader_log ; Log the error message before we crash
- add esp, 20 ; Clean up the args
- mov eax, 0
- jmp eax ; Crash intentionally by jumping to address zero
-endm
-
-DevExtTramp macro num
-public _vkdev_ext&num&@4
-_vkdev_ext&num&@4:
- mov eax, dword ptr [esp + 4] ; Dereference the handle to get the dispatch table
- jmp dword ptr [eax + (EXT_OFFSET_DEVICE_DISPATCH + (PTR_SIZE * num))] ; Jump to the appropriate call chain
-endm
-
-; This is also needed for 32-bit only
-.model flat
-
-ENDIF
-
-.const
- termin_error_string db 'Extension %s not supported for this physical device', 0
-
-.code
-
-IFDEF rax
-extrn loader_log:near
-ELSE
-extrn _loader_log:near
-ENDIF
-
- PhysDevExtTramp 0
- PhysDevExtTramp 1
- PhysDevExtTramp 2
- PhysDevExtTramp 3
- PhysDevExtTramp 4
- PhysDevExtTramp 5
- PhysDevExtTramp 6
- PhysDevExtTramp 7
- PhysDevExtTramp 8
- PhysDevExtTramp 9
- PhysDevExtTramp 10
- PhysDevExtTramp 11
- PhysDevExtTramp 12
- PhysDevExtTramp 13
- PhysDevExtTramp 14
- PhysDevExtTramp 15
- PhysDevExtTramp 16
- PhysDevExtTramp 17
- PhysDevExtTramp 18
- PhysDevExtTramp 19
- PhysDevExtTramp 20
- PhysDevExtTramp 21
- PhysDevExtTramp 22
- PhysDevExtTramp 23
- PhysDevExtTramp 24
- PhysDevExtTramp 25
- PhysDevExtTramp 26
- PhysDevExtTramp 27
- PhysDevExtTramp 28
- PhysDevExtTramp 29
- PhysDevExtTramp 30
- PhysDevExtTramp 31
- PhysDevExtTramp 32
- PhysDevExtTramp 33
- PhysDevExtTramp 34
- PhysDevExtTramp 35
- PhysDevExtTramp 36
- PhysDevExtTramp 37
- PhysDevExtTramp 38
- PhysDevExtTramp 39
- PhysDevExtTramp 40
- PhysDevExtTramp 41
- PhysDevExtTramp 42
- PhysDevExtTramp 43
- PhysDevExtTramp 44
- PhysDevExtTramp 45
- PhysDevExtTramp 46
- PhysDevExtTramp 47
- PhysDevExtTramp 48
- PhysDevExtTramp 49
- PhysDevExtTramp 50
- PhysDevExtTramp 51
- PhysDevExtTramp 52
- PhysDevExtTramp 53
- PhysDevExtTramp 54
- PhysDevExtTramp 55
- PhysDevExtTramp 56
- PhysDevExtTramp 57
- PhysDevExtTramp 58
- PhysDevExtTramp 59
- PhysDevExtTramp 60
- PhysDevExtTramp 61
- PhysDevExtTramp 62
- PhysDevExtTramp 63
- PhysDevExtTramp 64
- PhysDevExtTramp 65
- PhysDevExtTramp 66
- PhysDevExtTramp 67
- PhysDevExtTramp 68
- PhysDevExtTramp 69
- PhysDevExtTramp 70
- PhysDevExtTramp 71
- PhysDevExtTramp 72
- PhysDevExtTramp 73
- PhysDevExtTramp 74
- PhysDevExtTramp 75
- PhysDevExtTramp 76
- PhysDevExtTramp 77
- PhysDevExtTramp 78
- PhysDevExtTramp 79
- PhysDevExtTramp 80
- PhysDevExtTramp 81
- PhysDevExtTramp 82
- PhysDevExtTramp 83
- PhysDevExtTramp 84
- PhysDevExtTramp 85
- PhysDevExtTramp 86
- PhysDevExtTramp 87
- PhysDevExtTramp 88
- PhysDevExtTramp 89
- PhysDevExtTramp 90
- PhysDevExtTramp 91
- PhysDevExtTramp 92
- PhysDevExtTramp 93
- PhysDevExtTramp 94
- PhysDevExtTramp 95
- PhysDevExtTramp 96
- PhysDevExtTramp 97
- PhysDevExtTramp 98
- PhysDevExtTramp 99
- PhysDevExtTramp 100
- PhysDevExtTramp 101
- PhysDevExtTramp 102
- PhysDevExtTramp 103
- PhysDevExtTramp 104
- PhysDevExtTramp 105
- PhysDevExtTramp 106
- PhysDevExtTramp 107
- PhysDevExtTramp 108
- PhysDevExtTramp 109
- PhysDevExtTramp 110
- PhysDevExtTramp 111
- PhysDevExtTramp 112
- PhysDevExtTramp 113
- PhysDevExtTramp 114
- PhysDevExtTramp 115
- PhysDevExtTramp 116
- PhysDevExtTramp 117
- PhysDevExtTramp 118
- PhysDevExtTramp 119
- PhysDevExtTramp 120
- PhysDevExtTramp 121
- PhysDevExtTramp 122
- PhysDevExtTramp 123
- PhysDevExtTramp 124
- PhysDevExtTramp 125
- PhysDevExtTramp 126
- PhysDevExtTramp 127
- PhysDevExtTramp 128
- PhysDevExtTramp 129
- PhysDevExtTramp 130
- PhysDevExtTramp 131
- PhysDevExtTramp 132
- PhysDevExtTramp 133
- PhysDevExtTramp 134
- PhysDevExtTramp 135
- PhysDevExtTramp 136
- PhysDevExtTramp 137
- PhysDevExtTramp 138
- PhysDevExtTramp 139
- PhysDevExtTramp 140
- PhysDevExtTramp 141
- PhysDevExtTramp 142
- PhysDevExtTramp 143
- PhysDevExtTramp 144
- PhysDevExtTramp 145
- PhysDevExtTramp 146
- PhysDevExtTramp 147
- PhysDevExtTramp 148
- PhysDevExtTramp 149
- PhysDevExtTramp 150
- PhysDevExtTramp 151
- PhysDevExtTramp 152
- PhysDevExtTramp 153
- PhysDevExtTramp 154
- PhysDevExtTramp 155
- PhysDevExtTramp 156
- PhysDevExtTramp 157
- PhysDevExtTramp 158
- PhysDevExtTramp 159
- PhysDevExtTramp 160
- PhysDevExtTramp 161
- PhysDevExtTramp 162
- PhysDevExtTramp 163
- PhysDevExtTramp 164
- PhysDevExtTramp 165
- PhysDevExtTramp 166
- PhysDevExtTramp 167
- PhysDevExtTramp 168
- PhysDevExtTramp 169
- PhysDevExtTramp 170
- PhysDevExtTramp 171
- PhysDevExtTramp 172
- PhysDevExtTramp 173
- PhysDevExtTramp 174
- PhysDevExtTramp 175
- PhysDevExtTramp 176
- PhysDevExtTramp 177
- PhysDevExtTramp 178
- PhysDevExtTramp 179
- PhysDevExtTramp 180
- PhysDevExtTramp 181
- PhysDevExtTramp 182
- PhysDevExtTramp 183
- PhysDevExtTramp 184
- PhysDevExtTramp 185
- PhysDevExtTramp 186
- PhysDevExtTramp 187
- PhysDevExtTramp 188
- PhysDevExtTramp 189
- PhysDevExtTramp 190
- PhysDevExtTramp 191
- PhysDevExtTramp 192
- PhysDevExtTramp 193
- PhysDevExtTramp 194
- PhysDevExtTramp 195
- PhysDevExtTramp 196
- PhysDevExtTramp 197
- PhysDevExtTramp 198
- PhysDevExtTramp 199
- PhysDevExtTramp 200
- PhysDevExtTramp 201
- PhysDevExtTramp 202
- PhysDevExtTramp 203
- PhysDevExtTramp 204
- PhysDevExtTramp 205
- PhysDevExtTramp 206
- PhysDevExtTramp 207
- PhysDevExtTramp 208
- PhysDevExtTramp 209
- PhysDevExtTramp 210
- PhysDevExtTramp 211
- PhysDevExtTramp 212
- PhysDevExtTramp 213
- PhysDevExtTramp 214
- PhysDevExtTramp 215
- PhysDevExtTramp 216
- PhysDevExtTramp 217
- PhysDevExtTramp 218
- PhysDevExtTramp 219
- PhysDevExtTramp 220
- PhysDevExtTramp 221
- PhysDevExtTramp 222
- PhysDevExtTramp 223
- PhysDevExtTramp 224
- PhysDevExtTramp 225
- PhysDevExtTramp 226
- PhysDevExtTramp 227
- PhysDevExtTramp 228
- PhysDevExtTramp 229
- PhysDevExtTramp 230
- PhysDevExtTramp 231
- PhysDevExtTramp 232
- PhysDevExtTramp 233
- PhysDevExtTramp 234
- PhysDevExtTramp 235
- PhysDevExtTramp 236
- PhysDevExtTramp 237
- PhysDevExtTramp 238
- PhysDevExtTramp 239
- PhysDevExtTramp 240
- PhysDevExtTramp 241
- PhysDevExtTramp 242
- PhysDevExtTramp 243
- PhysDevExtTramp 244
- PhysDevExtTramp 245
- PhysDevExtTramp 246
- PhysDevExtTramp 247
- PhysDevExtTramp 248
- PhysDevExtTramp 249
-
- PhysDevExtTermin 0
- PhysDevExtTermin 1
- PhysDevExtTermin 2
- PhysDevExtTermin 3
- PhysDevExtTermin 4
- PhysDevExtTermin 5
- PhysDevExtTermin 6
- PhysDevExtTermin 7
- PhysDevExtTermin 8
- PhysDevExtTermin 9
- PhysDevExtTermin 10
- PhysDevExtTermin 11
- PhysDevExtTermin 12
- PhysDevExtTermin 13
- PhysDevExtTermin 14
- PhysDevExtTermin 15
- PhysDevExtTermin 16
- PhysDevExtTermin 17
- PhysDevExtTermin 18
- PhysDevExtTermin 19
- PhysDevExtTermin 20
- PhysDevExtTermin 21
- PhysDevExtTermin 22
- PhysDevExtTermin 23
- PhysDevExtTermin 24
- PhysDevExtTermin 25
- PhysDevExtTermin 26
- PhysDevExtTermin 27
- PhysDevExtTermin 28
- PhysDevExtTermin 29
- PhysDevExtTermin 30
- PhysDevExtTermin 31
- PhysDevExtTermin 32
- PhysDevExtTermin 33
- PhysDevExtTermin 34
- PhysDevExtTermin 35
- PhysDevExtTermin 36
- PhysDevExtTermin 37
- PhysDevExtTermin 38
- PhysDevExtTermin 39
- PhysDevExtTermin 40
- PhysDevExtTermin 41
- PhysDevExtTermin 42
- PhysDevExtTermin 43
- PhysDevExtTermin 44
- PhysDevExtTermin 45
- PhysDevExtTermin 46
- PhysDevExtTermin 47
- PhysDevExtTermin 48
- PhysDevExtTermin 49
- PhysDevExtTermin 50
- PhysDevExtTermin 51
- PhysDevExtTermin 52
- PhysDevExtTermin 53
- PhysDevExtTermin 54
- PhysDevExtTermin 55
- PhysDevExtTermin 56
- PhysDevExtTermin 57
- PhysDevExtTermin 58
- PhysDevExtTermin 59
- PhysDevExtTermin 60
- PhysDevExtTermin 61
- PhysDevExtTermin 62
- PhysDevExtTermin 63
- PhysDevExtTermin 64
- PhysDevExtTermin 65
- PhysDevExtTermin 66
- PhysDevExtTermin 67
- PhysDevExtTermin 68
- PhysDevExtTermin 69
- PhysDevExtTermin 70
- PhysDevExtTermin 71
- PhysDevExtTermin 72
- PhysDevExtTermin 73
- PhysDevExtTermin 74
- PhysDevExtTermin 75
- PhysDevExtTermin 76
- PhysDevExtTermin 77
- PhysDevExtTermin 78
- PhysDevExtTermin 79
- PhysDevExtTermin 80
- PhysDevExtTermin 81
- PhysDevExtTermin 82
- PhysDevExtTermin 83
- PhysDevExtTermin 84
- PhysDevExtTermin 85
- PhysDevExtTermin 86
- PhysDevExtTermin 87
- PhysDevExtTermin 88
- PhysDevExtTermin 89
- PhysDevExtTermin 90
- PhysDevExtTermin 91
- PhysDevExtTermin 92
- PhysDevExtTermin 93
- PhysDevExtTermin 94
- PhysDevExtTermin 95
- PhysDevExtTermin 96
- PhysDevExtTermin 97
- PhysDevExtTermin 98
- PhysDevExtTermin 99
- PhysDevExtTermin 100
- PhysDevExtTermin 101
- PhysDevExtTermin 102
- PhysDevExtTermin 103
- PhysDevExtTermin 104
- PhysDevExtTermin 105
- PhysDevExtTermin 106
- PhysDevExtTermin 107
- PhysDevExtTermin 108
- PhysDevExtTermin 109
- PhysDevExtTermin 110
- PhysDevExtTermin 111
- PhysDevExtTermin 112
- PhysDevExtTermin 113
- PhysDevExtTermin 114
- PhysDevExtTermin 115
- PhysDevExtTermin 116
- PhysDevExtTermin 117
- PhysDevExtTermin 118
- PhysDevExtTermin 119
- PhysDevExtTermin 120
- PhysDevExtTermin 121
- PhysDevExtTermin 122
- PhysDevExtTermin 123
- PhysDevExtTermin 124
- PhysDevExtTermin 125
- PhysDevExtTermin 126
- PhysDevExtTermin 127
- PhysDevExtTermin 128
- PhysDevExtTermin 129
- PhysDevExtTermin 130
- PhysDevExtTermin 131
- PhysDevExtTermin 132
- PhysDevExtTermin 133
- PhysDevExtTermin 134
- PhysDevExtTermin 135
- PhysDevExtTermin 136
- PhysDevExtTermin 137
- PhysDevExtTermin 138
- PhysDevExtTermin 139
- PhysDevExtTermin 140
- PhysDevExtTermin 141
- PhysDevExtTermin 142
- PhysDevExtTermin 143
- PhysDevExtTermin 144
- PhysDevExtTermin 145
- PhysDevExtTermin 146
- PhysDevExtTermin 147
- PhysDevExtTermin 148
- PhysDevExtTermin 149
- PhysDevExtTermin 150
- PhysDevExtTermin 151
- PhysDevExtTermin 152
- PhysDevExtTermin 153
- PhysDevExtTermin 154
- PhysDevExtTermin 155
- PhysDevExtTermin 156
- PhysDevExtTermin 157
- PhysDevExtTermin 158
- PhysDevExtTermin 159
- PhysDevExtTermin 160
- PhysDevExtTermin 161
- PhysDevExtTermin 162
- PhysDevExtTermin 163
- PhysDevExtTermin 164
- PhysDevExtTermin 165
- PhysDevExtTermin 166
- PhysDevExtTermin 167
- PhysDevExtTermin 168
- PhysDevExtTermin 169
- PhysDevExtTermin 170
- PhysDevExtTermin 171
- PhysDevExtTermin 172
- PhysDevExtTermin 173
- PhysDevExtTermin 174
- PhysDevExtTermin 175
- PhysDevExtTermin 176
- PhysDevExtTermin 177
- PhysDevExtTermin 178
- PhysDevExtTermin 179
- PhysDevExtTermin 180
- PhysDevExtTermin 181
- PhysDevExtTermin 182
- PhysDevExtTermin 183
- PhysDevExtTermin 184
- PhysDevExtTermin 185
- PhysDevExtTermin 186
- PhysDevExtTermin 187
- PhysDevExtTermin 188
- PhysDevExtTermin 189
- PhysDevExtTermin 190
- PhysDevExtTermin 191
- PhysDevExtTermin 192
- PhysDevExtTermin 193
- PhysDevExtTermin 194
- PhysDevExtTermin 195
- PhysDevExtTermin 196
- PhysDevExtTermin 197
- PhysDevExtTermin 198
- PhysDevExtTermin 199
- PhysDevExtTermin 200
- PhysDevExtTermin 201
- PhysDevExtTermin 202
- PhysDevExtTermin 203
- PhysDevExtTermin 204
- PhysDevExtTermin 205
- PhysDevExtTermin 206
- PhysDevExtTermin 207
- PhysDevExtTermin 208
- PhysDevExtTermin 209
- PhysDevExtTermin 210
- PhysDevExtTermin 211
- PhysDevExtTermin 212
- PhysDevExtTermin 213
- PhysDevExtTermin 214
- PhysDevExtTermin 215
- PhysDevExtTermin 216
- PhysDevExtTermin 217
- PhysDevExtTermin 218
- PhysDevExtTermin 219
- PhysDevExtTermin 220
- PhysDevExtTermin 221
- PhysDevExtTermin 222
- PhysDevExtTermin 223
- PhysDevExtTermin 224
- PhysDevExtTermin 225
- PhysDevExtTermin 226
- PhysDevExtTermin 227
- PhysDevExtTermin 228
- PhysDevExtTermin 229
- PhysDevExtTermin 230
- PhysDevExtTermin 231
- PhysDevExtTermin 232
- PhysDevExtTermin 233
- PhysDevExtTermin 234
- PhysDevExtTermin 235
- PhysDevExtTermin 236
- PhysDevExtTermin 237
- PhysDevExtTermin 238
- PhysDevExtTermin 239
- PhysDevExtTermin 240
- PhysDevExtTermin 241
- PhysDevExtTermin 242
- PhysDevExtTermin 243
- PhysDevExtTermin 244
- PhysDevExtTermin 245
- PhysDevExtTermin 246
- PhysDevExtTermin 247
- PhysDevExtTermin 248
- PhysDevExtTermin 249
-
- DevExtTramp 0
- DevExtTramp 1
- DevExtTramp 2
- DevExtTramp 3
- DevExtTramp 4
- DevExtTramp 5
- DevExtTramp 6
- DevExtTramp 7
- DevExtTramp 8
- DevExtTramp 9
- DevExtTramp 10
- DevExtTramp 11
- DevExtTramp 12
- DevExtTramp 13
- DevExtTramp 14
- DevExtTramp 15
- DevExtTramp 16
- DevExtTramp 17
- DevExtTramp 18
- DevExtTramp 19
- DevExtTramp 20
- DevExtTramp 21
- DevExtTramp 22
- DevExtTramp 23
- DevExtTramp 24
- DevExtTramp 25
- DevExtTramp 26
- DevExtTramp 27
- DevExtTramp 28
- DevExtTramp 29
- DevExtTramp 30
- DevExtTramp 31
- DevExtTramp 32
- DevExtTramp 33
- DevExtTramp 34
- DevExtTramp 35
- DevExtTramp 36
- DevExtTramp 37
- DevExtTramp 38
- DevExtTramp 39
- DevExtTramp 40
- DevExtTramp 41
- DevExtTramp 42
- DevExtTramp 43
- DevExtTramp 44
- DevExtTramp 45
- DevExtTramp 46
- DevExtTramp 47
- DevExtTramp 48
- DevExtTramp 49
- DevExtTramp 50
- DevExtTramp 51
- DevExtTramp 52
- DevExtTramp 53
- DevExtTramp 54
- DevExtTramp 55
- DevExtTramp 56
- DevExtTramp 57
- DevExtTramp 58
- DevExtTramp 59
- DevExtTramp 60
- DevExtTramp 61
- DevExtTramp 62
- DevExtTramp 63
- DevExtTramp 64
- DevExtTramp 65
- DevExtTramp 66
- DevExtTramp 67
- DevExtTramp 68
- DevExtTramp 69
- DevExtTramp 70
- DevExtTramp 71
- DevExtTramp 72
- DevExtTramp 73
- DevExtTramp 74
- DevExtTramp 75
- DevExtTramp 76
- DevExtTramp 77
- DevExtTramp 78
- DevExtTramp 79
- DevExtTramp 80
- DevExtTramp 81
- DevExtTramp 82
- DevExtTramp 83
- DevExtTramp 84
- DevExtTramp 85
- DevExtTramp 86
- DevExtTramp 87
- DevExtTramp 88
- DevExtTramp 89
- DevExtTramp 90
- DevExtTramp 91
- DevExtTramp 92
- DevExtTramp 93
- DevExtTramp 94
- DevExtTramp 95
- DevExtTramp 96
- DevExtTramp 97
- DevExtTramp 98
- DevExtTramp 99
- DevExtTramp 100
- DevExtTramp 101
- DevExtTramp 102
- DevExtTramp 103
- DevExtTramp 104
- DevExtTramp 105
- DevExtTramp 106
- DevExtTramp 107
- DevExtTramp 108
- DevExtTramp 109
- DevExtTramp 110
- DevExtTramp 111
- DevExtTramp 112
- DevExtTramp 113
- DevExtTramp 114
- DevExtTramp 115
- DevExtTramp 116
- DevExtTramp 117
- DevExtTramp 118
- DevExtTramp 119
- DevExtTramp 120
- DevExtTramp 121
- DevExtTramp 122
- DevExtTramp 123
- DevExtTramp 124
- DevExtTramp 125
- DevExtTramp 126
- DevExtTramp 127
- DevExtTramp 128
- DevExtTramp 129
- DevExtTramp 130
- DevExtTramp 131
- DevExtTramp 132
- DevExtTramp 133
- DevExtTramp 134
- DevExtTramp 135
- DevExtTramp 136
- DevExtTramp 137
- DevExtTramp 138
- DevExtTramp 139
- DevExtTramp 140
- DevExtTramp 141
- DevExtTramp 142
- DevExtTramp 143
- DevExtTramp 144
- DevExtTramp 145
- DevExtTramp 146
- DevExtTramp 147
- DevExtTramp 148
- DevExtTramp 149
- DevExtTramp 150
- DevExtTramp 151
- DevExtTramp 152
- DevExtTramp 153
- DevExtTramp 154
- DevExtTramp 155
- DevExtTramp 156
- DevExtTramp 157
- DevExtTramp 158
- DevExtTramp 159
- DevExtTramp 160
- DevExtTramp 161
- DevExtTramp 162
- DevExtTramp 163
- DevExtTramp 164
- DevExtTramp 165
- DevExtTramp 166
- DevExtTramp 167
- DevExtTramp 168
- DevExtTramp 169
- DevExtTramp 170
- DevExtTramp 171
- DevExtTramp 172
- DevExtTramp 173
- DevExtTramp 174
- DevExtTramp 175
- DevExtTramp 176
- DevExtTramp 177
- DevExtTramp 178
- DevExtTramp 179
- DevExtTramp 180
- DevExtTramp 181
- DevExtTramp 182
- DevExtTramp 183
- DevExtTramp 184
- DevExtTramp 185
- DevExtTramp 186
- DevExtTramp 187
- DevExtTramp 188
- DevExtTramp 189
- DevExtTramp 190
- DevExtTramp 191
- DevExtTramp 192
- DevExtTramp 193
- DevExtTramp 194
- DevExtTramp 195
- DevExtTramp 196
- DevExtTramp 197
- DevExtTramp 198
- DevExtTramp 199
- DevExtTramp 200
- DevExtTramp 201
- DevExtTramp 202
- DevExtTramp 203
- DevExtTramp 204
- DevExtTramp 205
- DevExtTramp 206
- DevExtTramp 207
- DevExtTramp 208
- DevExtTramp 209
- DevExtTramp 210
- DevExtTramp 211
- DevExtTramp 212
- DevExtTramp 213
- DevExtTramp 214
- DevExtTramp 215
- DevExtTramp 216
- DevExtTramp 217
- DevExtTramp 218
- DevExtTramp 219
- DevExtTramp 220
- DevExtTramp 221
- DevExtTramp 222
- DevExtTramp 223
- DevExtTramp 224
- DevExtTramp 225
- DevExtTramp 226
- DevExtTramp 227
- DevExtTramp 228
- DevExtTramp 229
- DevExtTramp 230
- DevExtTramp 231
- DevExtTramp 232
- DevExtTramp 233
- DevExtTramp 234
- DevExtTramp 235
- DevExtTramp 236
- DevExtTramp 237
- DevExtTramp 238
- DevExtTramp 239
- DevExtTramp 240
- DevExtTramp 241
- DevExtTramp 242
- DevExtTramp 243
- DevExtTramp 244
- DevExtTramp 245
- DevExtTramp 246
- DevExtTramp 247
- DevExtTramp 248
- DevExtTramp 249
-
-end
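
Note: the "PhysDevExtTermin N" and "DevExtTramp N" entries removed above are the loader's fixed pool of 250 numbered terminators and trampolines for "unknown" extension commands — device or physical-device commands introduced by an ICD or layer that the loader has no compiled-in knowledge of. Because every dispatchable Vulkan handle begins with a pointer to its dispatch table, a numbered stub can forward such a call by slot index without knowing its real prototype. A minimal conceptual sketch in C (the slot array and accessor names are illustrative, not the loader's generated source; the real stubs are emitted as assembly so that all argument registers pass through untouched):

#define MAX_UNKNOWN_EXTS 250                 /* matches the 0..249 slots above */

typedef void (*PFN_DevExt)(void *device);    /* stand-in for an unknown prototype */

struct dev_ext_dispatch {
    PFN_DevExt dev_ext[MAX_UNKNOWN_EXTS];    /* filled via vkGetDeviceProcAddr */
};

/* Dispatchable handles store their dispatch pointer first (sketched). */
static struct dev_ext_dispatch *get_dispatch(void *device) {
    return *(struct dev_ext_dispatch **)device;
}

/* What "DevExtTramp 7" amounts to: jump through slot 7 of the per-device
 * table. C cannot forward an arbitrary argument list, which is why the
 * real trampolines use assembly or compiler-specific thunks instead. */
static void dev_ext_tramp_7(void *device) {
    get_dispatch(device)->dev_ext[7](device);
}
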
diff --git a/thirdparty/vulkan/loader/vk_dispatch_table_helper.h b/thirdparty/vulkan/loader/vk_dispatch_table_helper.h
index d934798db4..386745a1e7 100644
--- a/thirdparty/vulkan/loader/vk_dispatch_table_helper.h
+++ b/thirdparty/vulkan/loader/vk_dispatch_table_helper.h
@@ -63,10 +63,10 @@ static VKAPI_ATTR void VKAPI_CALL StubCmdPushDescriptorSetWithTemplateKHR(VkComm
static VKAPI_ATTR VkResult VKAPI_CALL StubCreateDescriptorUpdateTemplateKHR(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) { return VK_SUCCESS; };
static VKAPI_ATTR void VKAPI_CALL StubDestroyDescriptorUpdateTemplateKHR(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) { };
static VKAPI_ATTR void VKAPI_CALL StubUpdateDescriptorSetWithTemplateKHR(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) { };
-static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) { return VK_SUCCESS; };
-static VKAPI_ATTR void VKAPI_CALL StubCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo) { };
-static VKAPI_ATTR void VKAPI_CALL StubCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo) { };
+static VKAPI_ATTR VkResult VKAPI_CALL StubCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo) { };
+static VKAPI_ATTR void VKAPI_CALL StubCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo) { };
+static VKAPI_ATTR void VKAPI_CALL StubCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo) { };
static VKAPI_ATTR VkResult VKAPI_CALL StubGetSwapchainStatusKHR(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
#ifdef VK_USE_PLATFORM_WIN32_KHR
static VKAPI_ATTR VkResult VKAPI_CALL StubImportFenceWin32HandleKHR(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo) { return VK_SUCCESS; };
@@ -76,6 +76,8 @@ static VKAPI_ATTR VkResult VKAPI_CALL StubGetFenceWin32HandleKHR(VkDevice device
#endif // VK_USE_PLATFORM_WIN32_KHR
static VKAPI_ATTR VkResult VKAPI_CALL StubImportFenceFdKHR(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo) { return VK_SUCCESS; };
static VKAPI_ATTR VkResult VKAPI_CALL StubGetFenceFdKHR(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireProfilingLockKHR(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubReleaseProfilingLockKHR(VkDevice device) { };
static VKAPI_ATTR void VKAPI_CALL StubGetImageMemoryRequirements2KHR(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { };
static VKAPI_ATTR void VKAPI_CALL StubGetBufferMemoryRequirements2KHR(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) { };
static VKAPI_ATTR void VKAPI_CALL StubGetImageSparseMemoryRequirements2KHR(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) { };
@@ -87,8 +89,11 @@ static VKAPI_ATTR void VKAPI_CALL StubGetDescriptorSetLayoutSupportKHR(VkDevice
static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { };
static VKAPI_ATTR void VKAPI_CALL StubCmdDrawIndexedIndirectCountKHR(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride) { };
static VKAPI_ATTR VkResult VKAPI_CALL StubGetSemaphoreCounterValueKHR(VkDevice device, VkSemaphore semaphore, uint64_t* pValue) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfoKHR* pWaitInfo, uint64_t timeout) { return VK_SUCCESS; };
-static VKAPI_ATTR VkResult VKAPI_CALL StubSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfoKHR* pSignalInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubWaitSemaphoresKHR(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout) { return VK_SUCCESS; };
+static VKAPI_ATTR VkResult VKAPI_CALL StubSignalSemaphoreKHR(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo) { return VK_SUCCESS; };
+static VKAPI_ATTR void VKAPI_CALL StubGetBufferDeviceAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { };
+static VKAPI_ATTR void VKAPI_CALL StubGetBufferOpaqueCaptureAddressKHR(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { };
+static VKAPI_ATTR void VKAPI_CALL StubGetDeviceMemoryOpaqueCaptureAddressKHR(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo) { };
static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutablePropertiesKHR(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties) { return VK_SUCCESS; };
static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableStatisticsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics) { return VK_SUCCESS; };
static VKAPI_ATTR VkResult VKAPI_CALL StubGetPipelineExecutableInternalRepresentationsKHR(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) { return VK_SUCCESS; };
@@ -175,7 +180,7 @@ static VKAPI_ATTR VkResult VKAPI_CALL StubReleasePerformanceConfigurationINTEL(V
static VKAPI_ATTR VkResult VKAPI_CALL StubQueueSetPerformanceConfigurationINTEL(VkQueue queue, VkPerformanceConfigurationINTEL configuration) { return VK_SUCCESS; };
static VKAPI_ATTR VkResult VKAPI_CALL StubGetPerformanceParameterINTEL(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue) { return VK_SUCCESS; };
static VKAPI_ATTR void VKAPI_CALL StubSetLocalDimmingAMD(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable) { };
-static VKAPI_ATTR void VKAPI_CALL StubGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo) { };
+static VKAPI_ATTR void VKAPI_CALL StubGetBufferDeviceAddressEXT(VkDevice device, const VkBufferDeviceAddressInfo* pInfo) { };
#ifdef VK_USE_PLATFORM_WIN32_KHR
static VKAPI_ATTR VkResult VKAPI_CALL StubAcquireFullScreenExclusiveModeEXT(VkDevice device, VkSwapchainKHR swapchain) { return VK_SUCCESS; };
#endif // VK_USE_PLATFORM_WIN32_KHR
@@ -330,6 +335,19 @@ static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDisp
table->DestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate) gpa(device, "vkDestroyDescriptorUpdateTemplate");
table->UpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate) gpa(device, "vkUpdateDescriptorSetWithTemplate");
table->GetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport) gpa(device, "vkGetDescriptorSetLayoutSupport");
+ table->CmdDrawIndirectCount = (PFN_vkCmdDrawIndirectCount) gpa(device, "vkCmdDrawIndirectCount");
+ table->CmdDrawIndexedIndirectCount = (PFN_vkCmdDrawIndexedIndirectCount) gpa(device, "vkCmdDrawIndexedIndirectCount");
+ table->CreateRenderPass2 = (PFN_vkCreateRenderPass2) gpa(device, "vkCreateRenderPass2");
+ table->CmdBeginRenderPass2 = (PFN_vkCmdBeginRenderPass2) gpa(device, "vkCmdBeginRenderPass2");
+ table->CmdNextSubpass2 = (PFN_vkCmdNextSubpass2) gpa(device, "vkCmdNextSubpass2");
+ table->CmdEndRenderPass2 = (PFN_vkCmdEndRenderPass2) gpa(device, "vkCmdEndRenderPass2");
+ table->ResetQueryPool = (PFN_vkResetQueryPool) gpa(device, "vkResetQueryPool");
+ table->GetSemaphoreCounterValue = (PFN_vkGetSemaphoreCounterValue) gpa(device, "vkGetSemaphoreCounterValue");
+ table->WaitSemaphores = (PFN_vkWaitSemaphores) gpa(device, "vkWaitSemaphores");
+ table->SignalSemaphore = (PFN_vkSignalSemaphore) gpa(device, "vkSignalSemaphore");
+ table->GetBufferDeviceAddress = (PFN_vkGetBufferDeviceAddress) gpa(device, "vkGetBufferDeviceAddress");
+ table->GetBufferOpaqueCaptureAddress = (PFN_vkGetBufferOpaqueCaptureAddress) gpa(device, "vkGetBufferOpaqueCaptureAddress");
+ table->GetDeviceMemoryOpaqueCaptureAddress = (PFN_vkGetDeviceMemoryOpaqueCaptureAddress) gpa(device, "vkGetDeviceMemoryOpaqueCaptureAddress");
table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
if (table->CreateSwapchainKHR == nullptr) { table->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)StubCreateSwapchainKHR; }
table->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
@@ -412,6 +430,10 @@ static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDisp
if (table->ImportFenceFdKHR == nullptr) { table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR)StubImportFenceFdKHR; }
table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR) gpa(device, "vkGetFenceFdKHR");
if (table->GetFenceFdKHR == nullptr) { table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR)StubGetFenceFdKHR; }
+ table->AcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR) gpa(device, "vkAcquireProfilingLockKHR");
+ if (table->AcquireProfilingLockKHR == nullptr) { table->AcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)StubAcquireProfilingLockKHR; }
+ table->ReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR) gpa(device, "vkReleaseProfilingLockKHR");
+ if (table->ReleaseProfilingLockKHR == nullptr) { table->ReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)StubReleaseProfilingLockKHR; }
table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR) gpa(device, "vkGetImageMemoryRequirements2KHR");
if (table->GetImageMemoryRequirements2KHR == nullptr) { table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)StubGetImageMemoryRequirements2KHR; }
table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR) gpa(device, "vkGetBufferMemoryRequirements2KHR");
@@ -438,6 +460,12 @@ static inline void layer_init_device_dispatch_table(VkDevice device, VkLayerDisp
if (table->WaitSemaphoresKHR == nullptr) { table->WaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)StubWaitSemaphoresKHR; }
table->SignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR) gpa(device, "vkSignalSemaphoreKHR");
if (table->SignalSemaphoreKHR == nullptr) { table->SignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)StubSignalSemaphoreKHR; }
+ table->GetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR) gpa(device, "vkGetBufferDeviceAddressKHR");
+ if (table->GetBufferDeviceAddressKHR == nullptr) { table->GetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)StubGetBufferDeviceAddressKHR; }
+ table->GetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR) gpa(device, "vkGetBufferOpaqueCaptureAddressKHR");
+ if (table->GetBufferOpaqueCaptureAddressKHR == nullptr) { table->GetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)StubGetBufferOpaqueCaptureAddressKHR; }
+ table->GetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR) gpa(device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR");
+ if (table->GetDeviceMemoryOpaqueCaptureAddressKHR == nullptr) { table->GetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)StubGetDeviceMemoryOpaqueCaptureAddressKHR; }
table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR) gpa(device, "vkGetPipelineExecutablePropertiesKHR");
if (table->GetPipelineExecutablePropertiesKHR == nullptr) { table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)StubGetPipelineExecutablePropertiesKHR; }
table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR) gpa(device, "vkGetPipelineExecutableStatisticsKHR");
@@ -710,6 +738,8 @@ static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLay
table->GetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR");
table->GetPhysicalDeviceExternalSemaphorePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR");
table->GetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR) gpa(instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+ table->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR) gpa(instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+ table->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = (PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR) gpa(instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
table->GetPhysicalDeviceSurfaceCapabilities2KHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR");
table->GetPhysicalDeviceSurfaceFormats2KHR = (PFN_vkGetPhysicalDeviceSurfaceFormats2KHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormats2KHR");
table->GetPhysicalDeviceDisplayProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayProperties2KHR) gpa(instance, "vkGetPhysicalDeviceDisplayProperties2KHR");
@@ -752,6 +782,7 @@ static inline void layer_init_instance_dispatch_table(VkInstance instance, VkLay
#ifdef VK_USE_PLATFORM_METAL_EXT
table->CreateMetalSurfaceEXT = (PFN_vkCreateMetalSurfaceEXT) gpa(instance, "vkCreateMetalSurfaceEXT");
#endif // VK_USE_PLATFORM_METAL_EXT
+ table->GetPhysicalDeviceToolPropertiesEXT = (PFN_vkGetPhysicalDeviceToolPropertiesEXT) gpa(instance, "vkGetPhysicalDeviceToolPropertiesEXT");
table->GetPhysicalDeviceCooperativeMatrixPropertiesNV = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV) gpa(instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV");
table->GetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = (PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV) gpa(instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV");
#ifdef VK_USE_PLATFORM_WIN32_KHR
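
Note: vk_dispatch_table_helper.h pairs every gpa() lookup with a benign stub so a layer can call any table entry unconditionally, even against a driver that predates the extension; the hunks above extend that pattern to the new VK_KHR_performance_query and VK_KHR_buffer_device_address commands and retype several KHR parameters to their promoted core structs (VkSubpassBeginInfoKHR becomes VkSubpassBeginInfo, and so on). A compact sketch of the stub-fallback pattern, using a hypothetical command name purely for illustration:

#include <vulkan/vulkan.h>

/* Hypothetical command, defined here only to keep the sketch self-contained. */
typedef VkResult (VKAPI_PTR *PFN_vkFrobnicateKHR)(VkDevice device);

struct demo_table {
    PFN_vkFrobnicateKHR FrobnicateKHR;
};

static VKAPI_ATTR VkResult VKAPI_CALL StubFrobnicateKHR(VkDevice device) {
    (void)device;
    return VK_SUCCESS;                 /* benign no-op when the ICD lacks it */
}

static void init_entry(struct demo_table *table, VkDevice device,
                       PFN_vkGetDeviceProcAddr gpa) {
    table->FrobnicateKHR = (PFN_vkFrobnicateKHR)gpa(device, "vkFrobnicateKHR");
    if (table->FrobnicateKHR == NULL)  /* fall back so callers never NULL-check */
        table->FrobnicateKHR = StubFrobnicateKHR;
}
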
diff --git a/thirdparty/vulkan/loader/vk_layer_dispatch_table.h b/thirdparty/vulkan/loader/vk_layer_dispatch_table.h
index 1f0342dc49..b919447e55 100644
--- a/thirdparty/vulkan/loader/vk_layer_dispatch_table.h
+++ b/thirdparty/vulkan/loader/vk_layer_dispatch_table.h
@@ -140,6 +140,10 @@ typedef struct VkLayerInstanceDispatchTable_ {
// ---- VK_KHR_external_fence_capabilities extension commands
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR GetPhysicalDeviceExternalFencePropertiesKHR;
+ // ---- VK_KHR_performance_query extension commands
+ PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+ PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+
// ---- VK_KHR_get_surface_capabilities2 extension commands
PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR GetPhysicalDeviceSurfaceCapabilities2KHR;
PFN_vkGetPhysicalDeviceSurfaceFormats2KHR GetPhysicalDeviceSurfaceFormats2KHR;
@@ -216,6 +220,9 @@ typedef struct VkLayerInstanceDispatchTable_ {
PFN_vkCreateMetalSurfaceEXT CreateMetalSurfaceEXT;
#endif // VK_USE_PLATFORM_METAL_EXT
+ // ---- VK_EXT_tooling_info extension commands
+ PFN_vkGetPhysicalDeviceToolPropertiesEXT GetPhysicalDeviceToolPropertiesEXT;
+
// ---- VK_NV_cooperative_matrix extension commands
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV GetPhysicalDeviceCooperativeMatrixPropertiesNV;
@@ -375,6 +382,21 @@ typedef struct VkLayerDispatchTable_ {
PFN_vkUpdateDescriptorSetWithTemplate UpdateDescriptorSetWithTemplate;
PFN_vkGetDescriptorSetLayoutSupport GetDescriptorSetLayoutSupport;
+ // ---- Core 1_2 commands
+ PFN_vkCmdDrawIndirectCount CmdDrawIndirectCount;
+ PFN_vkCmdDrawIndexedIndirectCount CmdDrawIndexedIndirectCount;
+ PFN_vkCreateRenderPass2 CreateRenderPass2;
+ PFN_vkCmdBeginRenderPass2 CmdBeginRenderPass2;
+ PFN_vkCmdNextSubpass2 CmdNextSubpass2;
+ PFN_vkCmdEndRenderPass2 CmdEndRenderPass2;
+ PFN_vkResetQueryPool ResetQueryPool;
+ PFN_vkGetSemaphoreCounterValue GetSemaphoreCounterValue;
+ PFN_vkWaitSemaphores WaitSemaphores;
+ PFN_vkSignalSemaphore SignalSemaphore;
+ PFN_vkGetBufferDeviceAddress GetBufferDeviceAddress;
+ PFN_vkGetBufferOpaqueCaptureAddress GetBufferOpaqueCaptureAddress;
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddress GetDeviceMemoryOpaqueCaptureAddress;
+
// ---- VK_KHR_swapchain extension commands
PFN_vkCreateSwapchainKHR CreateSwapchainKHR;
PFN_vkDestroySwapchainKHR DestroySwapchainKHR;
@@ -450,6 +472,10 @@ typedef struct VkLayerDispatchTable_ {
PFN_vkImportFenceFdKHR ImportFenceFdKHR;
PFN_vkGetFenceFdKHR GetFenceFdKHR;
+ // ---- VK_KHR_performance_query extension commands
+ PFN_vkAcquireProfilingLockKHR AcquireProfilingLockKHR;
+ PFN_vkReleaseProfilingLockKHR ReleaseProfilingLockKHR;
+
// ---- VK_KHR_get_memory_requirements2 extension commands
PFN_vkGetImageMemoryRequirements2KHR GetImageMemoryRequirements2KHR;
PFN_vkGetBufferMemoryRequirements2KHR GetBufferMemoryRequirements2KHR;
@@ -475,6 +501,11 @@ typedef struct VkLayerDispatchTable_ {
PFN_vkWaitSemaphoresKHR WaitSemaphoresKHR;
PFN_vkSignalSemaphoreKHR SignalSemaphoreKHR;
+ // ---- VK_KHR_buffer_device_address extension commands
+ PFN_vkGetBufferDeviceAddressKHR GetBufferDeviceAddressKHR;
+ PFN_vkGetBufferOpaqueCaptureAddressKHR GetBufferOpaqueCaptureAddressKHR;
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR GetDeviceMemoryOpaqueCaptureAddressKHR;
+
// ---- VK_KHR_pipeline_executable_properties extension commands
PFN_vkGetPipelineExecutablePropertiesKHR GetPipelineExecutablePropertiesKHR;
PFN_vkGetPipelineExecutableStatisticsKHR GetPipelineExecutableStatisticsKHR;
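
Note: the new "Core 1_2 commands" block mirrors Vulkan 1.2's promotion of these KHR commands into core: the same entry point is now reachable under two names, and the un-suffixed core name can be absent on pre-1.2 devices. A sketch of the usual prefer-core, fall-back-to-alias lookup (an illustrative helper, not loader source; the KHR PFN is a typedef alias of the core one, so a single type covers both names):

#include <vulkan/vulkan.h>

/* Returns NULL only if neither the core 1.2 name nor the
 * VK_KHR_timeline_semaphore alias is available on this device. */
static PFN_vkWaitSemaphores load_wait_semaphores(VkDevice device,
                                                 PFN_vkGetDeviceProcAddr gpa) {
    PFN_vkWaitSemaphores fp =
        (PFN_vkWaitSemaphores)gpa(device, "vkWaitSemaphores");
    if (fp == NULL)
        fp = (PFN_vkWaitSemaphores)gpa(device, "vkWaitSemaphoresKHR");
    return fp;
}
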
diff --git a/thirdparty/vulkan/loader/vk_loader_extensions.c b/thirdparty/vulkan/loader/vk_loader_extensions.c
index c7a55cf11a..542b8b4af4 100644
--- a/thirdparty/vulkan/loader/vk_loader_extensions.c
+++ b/thirdparty/vulkan/loader/vk_loader_extensions.c
@@ -172,6 +172,10 @@ VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_t
// ---- VK_KHR_external_fence_capabilities extension commands
LOOKUP_GIPA(GetPhysicalDeviceExternalFencePropertiesKHR, false);
+ // ---- VK_KHR_performance_query extension commands
+ LOOKUP_GIPA(EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, false);
+ LOOKUP_GIPA(GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, false);
+
// ---- VK_KHR_get_surface_capabilities2 extension commands
LOOKUP_GIPA(GetPhysicalDeviceSurfaceCapabilities2KHR, false);
LOOKUP_GIPA(GetPhysicalDeviceSurfaceFormats2KHR, false);
@@ -234,6 +238,12 @@ VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_t
// ---- VK_EXT_debug_utils extension commands
LOOKUP_GIPA(SetDebugUtilsObjectNameEXT, false);
LOOKUP_GIPA(SetDebugUtilsObjectTagEXT, false);
+ LOOKUP_GIPA(QueueBeginDebugUtilsLabelEXT, false);
+ LOOKUP_GIPA(QueueEndDebugUtilsLabelEXT, false);
+ LOOKUP_GIPA(QueueInsertDebugUtilsLabelEXT, false);
+ LOOKUP_GIPA(CmdBeginDebugUtilsLabelEXT, false);
+ LOOKUP_GIPA(CmdEndDebugUtilsLabelEXT, false);
+ LOOKUP_GIPA(CmdInsertDebugUtilsLabelEXT, false);
LOOKUP_GIPA(CreateDebugUtilsMessengerEXT, false);
LOOKUP_GIPA(DestroyDebugUtilsMessengerEXT, false);
LOOKUP_GIPA(SubmitDebugUtilsMessageEXT, false);
@@ -254,6 +264,9 @@ VKAPI_ATTR bool VKAPI_CALL loader_icd_init_entries(struct loader_icd_term *icd_t
LOOKUP_GIPA(CreateMetalSurfaceEXT, false);
#endif // VK_USE_PLATFORM_METAL_EXT
+ // ---- VK_EXT_tooling_info extension commands
+ LOOKUP_GIPA(GetPhysicalDeviceToolPropertiesEXT, false);
+
// ---- VK_NV_cooperative_matrix extension commands
LOOKUP_GIPA(GetPhysicalDeviceCooperativeMatrixPropertiesNV, false);
@@ -422,6 +435,21 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_dispatch_table(struct loader_dev_d
table->DestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate)gpa(dev, "vkDestroyDescriptorUpdateTemplate");
table->UpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate)gpa(dev, "vkUpdateDescriptorSetWithTemplate");
table->GetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport)gpa(dev, "vkGetDescriptorSetLayoutSupport");
+
+ // ---- Core 1_2 commands
+ table->CmdDrawIndirectCount = (PFN_vkCmdDrawIndirectCount)gpa(dev, "vkCmdDrawIndirectCount");
+ table->CmdDrawIndexedIndirectCount = (PFN_vkCmdDrawIndexedIndirectCount)gpa(dev, "vkCmdDrawIndexedIndirectCount");
+ table->CreateRenderPass2 = (PFN_vkCreateRenderPass2)gpa(dev, "vkCreateRenderPass2");
+ table->CmdBeginRenderPass2 = (PFN_vkCmdBeginRenderPass2)gpa(dev, "vkCmdBeginRenderPass2");
+ table->CmdNextSubpass2 = (PFN_vkCmdNextSubpass2)gpa(dev, "vkCmdNextSubpass2");
+ table->CmdEndRenderPass2 = (PFN_vkCmdEndRenderPass2)gpa(dev, "vkCmdEndRenderPass2");
+ table->ResetQueryPool = (PFN_vkResetQueryPool)gpa(dev, "vkResetQueryPool");
+ table->GetSemaphoreCounterValue = (PFN_vkGetSemaphoreCounterValue)gpa(dev, "vkGetSemaphoreCounterValue");
+ table->WaitSemaphores = (PFN_vkWaitSemaphores)gpa(dev, "vkWaitSemaphores");
+ table->SignalSemaphore = (PFN_vkSignalSemaphore)gpa(dev, "vkSignalSemaphore");
+ table->GetBufferDeviceAddress = (PFN_vkGetBufferDeviceAddress)gpa(dev, "vkGetBufferDeviceAddress");
+ table->GetBufferOpaqueCaptureAddress = (PFN_vkGetBufferOpaqueCaptureAddress)gpa(dev, "vkGetBufferOpaqueCaptureAddress");
+ table->GetDeviceMemoryOpaqueCaptureAddress = (PFN_vkGetDeviceMemoryOpaqueCaptureAddress)gpa(dev, "vkGetDeviceMemoryOpaqueCaptureAddress");
}
// Init Device function pointer dispatch table with extension commands
@@ -507,6 +535,10 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo
table->ImportFenceFdKHR = (PFN_vkImportFenceFdKHR)gdpa(dev, "vkImportFenceFdKHR");
table->GetFenceFdKHR = (PFN_vkGetFenceFdKHR)gdpa(dev, "vkGetFenceFdKHR");
+ // ---- VK_KHR_performance_query extension commands
+ table->AcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)gdpa(dev, "vkAcquireProfilingLockKHR");
+ table->ReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)gdpa(dev, "vkReleaseProfilingLockKHR");
+
// ---- VK_KHR_get_memory_requirements2 extension commands
table->GetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)gdpa(dev, "vkGetImageMemoryRequirements2KHR");
table->GetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)gdpa(dev, "vkGetBufferMemoryRequirements2KHR");
@@ -532,6 +564,11 @@ VKAPI_ATTR void VKAPI_CALL loader_init_device_extension_dispatch_table(struct lo
table->WaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)gdpa(dev, "vkWaitSemaphoresKHR");
table->SignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)gdpa(dev, "vkSignalSemaphoreKHR");
+ // ---- VK_KHR_buffer_device_address extension commands
+ table->GetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)gdpa(dev, "vkGetBufferDeviceAddressKHR");
+ table->GetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)gdpa(dev, "vkGetBufferOpaqueCaptureAddressKHR");
+ table->GetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)gdpa(dev, "vkGetDeviceMemoryOpaqueCaptureAddressKHR");
+
// ---- VK_KHR_pipeline_executable_properties extension commands
table->GetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)gdpa(dev, "vkGetPipelineExecutablePropertiesKHR");
table->GetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)gdpa(dev, "vkGetPipelineExecutableStatisticsKHR");
@@ -818,6 +855,10 @@ VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayer
// ---- VK_KHR_external_fence_capabilities extension commands
table->GetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)gpa(inst, "vkGetPhysicalDeviceExternalFencePropertiesKHR");
+ // ---- VK_KHR_performance_query extension commands
+ table->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)gpa(inst, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+ table->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = (PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)gpa(inst, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
+
// ---- VK_KHR_get_surface_capabilities2 extension commands
table->GetPhysicalDeviceSurfaceCapabilities2KHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)gpa(inst, "vkGetPhysicalDeviceSurfaceCapabilities2KHR");
table->GetPhysicalDeviceSurfaceFormats2KHR = (PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)gpa(inst, "vkGetPhysicalDeviceSurfaceFormats2KHR");
@@ -894,6 +935,9 @@ VKAPI_ATTR void VKAPI_CALL loader_init_instance_extension_dispatch_table(VkLayer
table->CreateMetalSurfaceEXT = (PFN_vkCreateMetalSurfaceEXT)gpa(inst, "vkCreateMetalSurfaceEXT");
#endif // VK_USE_PLATFORM_METAL_EXT
+ // ---- VK_EXT_tooling_info extension commands
+ table->GetPhysicalDeviceToolPropertiesEXT = (PFN_vkGetPhysicalDeviceToolPropertiesEXT)gpa(inst, "vkGetPhysicalDeviceToolPropertiesEXT");
+
// ---- VK_NV_cooperative_matrix extension commands
table->GetPhysicalDeviceCooperativeMatrixPropertiesNV = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)gpa(inst, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV");
@@ -1056,6 +1100,21 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis
if (!strcmp(name, "UpdateDescriptorSetWithTemplate")) return (void *)table->UpdateDescriptorSetWithTemplate;
if (!strcmp(name, "GetDescriptorSetLayoutSupport")) return (void *)table->GetDescriptorSetLayoutSupport;
+ // ---- Core 1_2 commands
+ if (!strcmp(name, "CmdDrawIndirectCount")) return (void *)table->CmdDrawIndirectCount;
+ if (!strcmp(name, "CmdDrawIndexedIndirectCount")) return (void *)table->CmdDrawIndexedIndirectCount;
+ if (!strcmp(name, "CreateRenderPass2")) return (void *)table->CreateRenderPass2;
+ if (!strcmp(name, "CmdBeginRenderPass2")) return (void *)table->CmdBeginRenderPass2;
+ if (!strcmp(name, "CmdNextSubpass2")) return (void *)table->CmdNextSubpass2;
+ if (!strcmp(name, "CmdEndRenderPass2")) return (void *)table->CmdEndRenderPass2;
+ if (!strcmp(name, "ResetQueryPool")) return (void *)table->ResetQueryPool;
+ if (!strcmp(name, "GetSemaphoreCounterValue")) return (void *)table->GetSemaphoreCounterValue;
+ if (!strcmp(name, "WaitSemaphores")) return (void *)table->WaitSemaphores;
+ if (!strcmp(name, "SignalSemaphore")) return (void *)table->SignalSemaphore;
+ if (!strcmp(name, "GetBufferDeviceAddress")) return (void *)table->GetBufferDeviceAddress;
+ if (!strcmp(name, "GetBufferOpaqueCaptureAddress")) return (void *)table->GetBufferOpaqueCaptureAddress;
+ if (!strcmp(name, "GetDeviceMemoryOpaqueCaptureAddress")) return (void *)table->GetDeviceMemoryOpaqueCaptureAddress;
+
// ---- VK_KHR_swapchain extension commands
if (!strcmp(name, "CreateSwapchainKHR")) return (void *)table->CreateSwapchainKHR;
if (!strcmp(name, "DestroySwapchainKHR")) return (void *)table->DestroySwapchainKHR;
@@ -1131,6 +1190,10 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis
if (!strcmp(name, "ImportFenceFdKHR")) return (void *)table->ImportFenceFdKHR;
if (!strcmp(name, "GetFenceFdKHR")) return (void *)table->GetFenceFdKHR;
+ // ---- VK_KHR_performance_query extension commands
+ if (!strcmp(name, "AcquireProfilingLockKHR")) return (void *)table->AcquireProfilingLockKHR;
+ if (!strcmp(name, "ReleaseProfilingLockKHR")) return (void *)table->ReleaseProfilingLockKHR;
+
// ---- VK_KHR_get_memory_requirements2 extension commands
if (!strcmp(name, "GetImageMemoryRequirements2KHR")) return (void *)table->GetImageMemoryRequirements2KHR;
if (!strcmp(name, "GetBufferMemoryRequirements2KHR")) return (void *)table->GetBufferMemoryRequirements2KHR;
@@ -1156,6 +1219,11 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_device_dispatch_table(const VkLayerDis
if (!strcmp(name, "WaitSemaphoresKHR")) return (void *)table->WaitSemaphoresKHR;
if (!strcmp(name, "SignalSemaphoreKHR")) return (void *)table->SignalSemaphoreKHR;
+ // ---- VK_KHR_buffer_device_address extension commands
+ if (!strcmp(name, "GetBufferDeviceAddressKHR")) return (void *)table->GetBufferDeviceAddressKHR;
+ if (!strcmp(name, "GetBufferOpaqueCaptureAddressKHR")) return (void *)table->GetBufferOpaqueCaptureAddressKHR;
+ if (!strcmp(name, "GetDeviceMemoryOpaqueCaptureAddressKHR")) return (void *)table->GetDeviceMemoryOpaqueCaptureAddressKHR;
+
// ---- VK_KHR_pipeline_executable_properties extension commands
if (!strcmp(name, "GetPipelineExecutablePropertiesKHR")) return (void *)table->GetPipelineExecutablePropertiesKHR;
if (!strcmp(name, "GetPipelineExecutableStatisticsKHR")) return (void *)table->GetPipelineExecutableStatisticsKHR;
@@ -1446,6 +1514,10 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerI
// ---- VK_KHR_external_fence_capabilities extension commands
if (!strcmp(name, "GetPhysicalDeviceExternalFencePropertiesKHR")) return (void *)table->GetPhysicalDeviceExternalFencePropertiesKHR;
+ // ---- VK_KHR_performance_query extension commands
+ if (!strcmp(name, "EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR")) return (void *)table->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+ if (!strcmp(name, "GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR")) return (void *)table->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+
// ---- VK_KHR_get_surface_capabilities2 extension commands
if (!strcmp(name, "GetPhysicalDeviceSurfaceCapabilities2KHR")) return (void *)table->GetPhysicalDeviceSurfaceCapabilities2KHR;
if (!strcmp(name, "GetPhysicalDeviceSurfaceFormats2KHR")) return (void *)table->GetPhysicalDeviceSurfaceFormats2KHR;
@@ -1522,6 +1594,9 @@ VKAPI_ATTR void* VKAPI_CALL loader_lookup_instance_dispatch_table(const VkLayerI
if (!strcmp(name, "CreateMetalSurfaceEXT")) return (void *)table->CreateMetalSurfaceEXT;
#endif // VK_USE_PLATFORM_METAL_EXT
+ // ---- VK_EXT_tooling_info extension commands
+ if (!strcmp(name, "GetPhysicalDeviceToolPropertiesEXT")) return (void *)table->GetPhysicalDeviceToolPropertiesEXT;
+
// ---- VK_NV_cooperative_matrix extension commands
if (!strcmp(name, "GetPhysicalDeviceCooperativeMatrixPropertiesNV")) return (void *)table->GetPhysicalDeviceCooperativeMatrixPropertiesNV;
@@ -1725,7 +1800,7 @@ VKAPI_ATTR void VKAPI_CALL UpdateDescriptorSetWithTemplateKHR(
VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
VkDevice device,
- const VkRenderPassCreateInfo2KHR* pCreateInfo,
+ const VkRenderPassCreateInfo2* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkRenderPass* pRenderPass) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
@@ -1735,22 +1810,22 @@ VKAPI_ATTR VkResult VKAPI_CALL CreateRenderPass2KHR(
VKAPI_ATTR void VKAPI_CALL CmdBeginRenderPass2KHR(
VkCommandBuffer commandBuffer,
const VkRenderPassBeginInfo* pRenderPassBegin,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo) {
+ const VkSubpassBeginInfo* pSubpassBeginInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
disp->CmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
}
VKAPI_ATTR void VKAPI_CALL CmdNextSubpass2KHR(
VkCommandBuffer commandBuffer,
- const VkSubpassBeginInfoKHR* pSubpassBeginInfo,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
+ const VkSubpassBeginInfo* pSubpassBeginInfo,
+ const VkSubpassEndInfo* pSubpassEndInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
disp->CmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
}
VKAPI_ATTR void VKAPI_CALL CmdEndRenderPass2KHR(
VkCommandBuffer commandBuffer,
- const VkSubpassEndInfoKHR* pSubpassEndInfo) {
+ const VkSubpassEndInfo* pSubpassEndInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
disp->CmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo);
}
@@ -1806,6 +1881,72 @@ VKAPI_ATTR VkResult VKAPI_CALL GetFenceFdKHR(
}
+// ---- VK_KHR_performance_query extension trampoline/terminators
+
+VKAPI_ATTR VkResult VKAPI_CALL EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ uint32_t* pCounterCount,
+ VkPerformanceCounterKHR* pCounters,
+ VkPerformanceCounterDescriptionKHR* pCounterDescriptions) {
+ const VkLayerInstanceDispatchTable *disp;
+ VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+ disp = loader_get_instance_layer_dispatch(physicalDevice);
+ return disp->EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(unwrapped_phys_dev, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL terminator_EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ uint32_t* pCounterCount,
+ VkPerformanceCounterKHR* pCounters,
+ VkPerformanceCounterDescriptionKHR* pCounterDescriptions) {
+ struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+ struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+ if (NULL == icd_term->dispatch.EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR) {
+ loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+ "ICD associated with VkPhysicalDevice does not support EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR");
+ }
+ return icd_term->dispatch.EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(phys_dev_term->phys_dev, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions);
+}
+
+VKAPI_ATTR void VKAPI_CALL GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+ VkPhysicalDevice physicalDevice,
+ const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo,
+ uint32_t* pNumPasses) {
+ const VkLayerInstanceDispatchTable *disp;
+ VkPhysicalDevice unwrapped_phys_dev = loader_unwrap_physical_device(physicalDevice);
+ disp = loader_get_instance_layer_dispatch(physicalDevice);
+ disp->GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(unwrapped_phys_dev, pPerformanceQueryCreateInfo, pNumPasses);
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+ VkPhysicalDevice physicalDevice,
+ const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo,
+ uint32_t* pNumPasses) {
+ struct loader_physical_device_term *phys_dev_term = (struct loader_physical_device_term *)physicalDevice;
+ struct loader_icd_term *icd_term = phys_dev_term->this_icd_term;
+ if (NULL == icd_term->dispatch.GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR) {
+ loader_log(icd_term->this_instance, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
+ "ICD associated with VkPhysicalDevice does not support GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR");
+ }
+ icd_term->dispatch.GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(phys_dev_term->phys_dev, pPerformanceQueryCreateInfo, pNumPasses);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL AcquireProfilingLockKHR(
+ VkDevice device,
+ const VkAcquireProfilingLockInfoKHR* pInfo) {
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->AcquireProfilingLockKHR(device, pInfo);
+}
+
+VKAPI_ATTR void VKAPI_CALL ReleaseProfilingLockKHR(
+ VkDevice device) {
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ disp->ReleaseProfilingLockKHR(device);
+}
+
+
// ---- VK_KHR_get_memory_requirements2 extension trampoline/terminators
VKAPI_ATTR void VKAPI_CALL GetImageMemoryRequirements2KHR(
@@ -1923,7 +2064,7 @@ VKAPI_ATTR VkResult VKAPI_CALL GetSemaphoreCounterValueKHR(
VKAPI_ATTR VkResult VKAPI_CALL WaitSemaphoresKHR(
VkDevice device,
- const VkSemaphoreWaitInfoKHR* pWaitInfo,
+ const VkSemaphoreWaitInfo* pWaitInfo,
uint64_t timeout) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->WaitSemaphoresKHR(device, pWaitInfo, timeout);
@@ -1931,12 +2072,36 @@ VKAPI_ATTR VkResult VKAPI_CALL WaitSemaphoresKHR(
VKAPI_ATTR VkResult VKAPI_CALL SignalSemaphoreKHR(
VkDevice device,
- const VkSemaphoreSignalInfoKHR* pSignalInfo) {
+ const VkSemaphoreSignalInfo* pSignalInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->SignalSemaphoreKHR(device, pSignalInfo);
}
+// ---- VK_KHR_buffer_device_address extension trampoline/terminators
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressKHR(
+ VkDevice device,
+ const VkBufferDeviceAddressInfo* pInfo) {
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->GetBufferDeviceAddressKHR(device, pInfo);
+}
+
+VKAPI_ATTR uint64_t VKAPI_CALL GetBufferOpaqueCaptureAddressKHR(
+ VkDevice device,
+ const VkBufferDeviceAddressInfo* pInfo) {
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->GetBufferOpaqueCaptureAddressKHR(device, pInfo);
+}
+
+VKAPI_ATTR uint64_t VKAPI_CALL GetDeviceMemoryOpaqueCaptureAddressKHR(
+ VkDevice device,
+ const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo) {
+ const VkLayerDispatchTable *disp = loader_get_dispatch(device);
+ return disp->GetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo);
+}
+
+
// ---- VK_KHR_pipeline_executable_properties extension trampoline/terminators
VKAPI_ATTR VkResult VKAPI_CALL GetPipelineExecutablePropertiesKHR(
@@ -2552,7 +2717,18 @@ VKAPI_ATTR void VKAPI_CALL QueueBeginDebugUtilsLabelEXT(
const VkDebugUtilsLabelEXT* pLabelInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(queue);
if (disp->QueueBeginDebugUtilsLabelEXT != NULL) {
- disp->QueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
+ disp->QueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
+ }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_QueueBeginDebugUtilsLabelEXT(
+ VkQueue queue,
+ const VkDebugUtilsLabelEXT* pLabelInfo) {
+ uint32_t icd_index = 0;
+ struct loader_device *dev;
+ struct loader_icd_term *icd_term = loader_get_icd_and_device(queue, &dev, &icd_index);
+ if (NULL != icd_term && NULL != icd_term->dispatch.QueueBeginDebugUtilsLabelEXT) {
+ icd_term->dispatch.QueueBeginDebugUtilsLabelEXT(queue, pLabelInfo);
}
}
@@ -2560,7 +2736,17 @@ VKAPI_ATTR void VKAPI_CALL QueueEndDebugUtilsLabelEXT(
VkQueue queue) {
const VkLayerDispatchTable *disp = loader_get_dispatch(queue);
if (disp->QueueEndDebugUtilsLabelEXT != NULL) {
- disp->QueueEndDebugUtilsLabelEXT(queue);
+ disp->QueueEndDebugUtilsLabelEXT(queue);
+ }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_QueueEndDebugUtilsLabelEXT(
+ VkQueue queue) {
+ uint32_t icd_index = 0;
+ struct loader_device *dev;
+ struct loader_icd_term *icd_term = loader_get_icd_and_device(queue, &dev, &icd_index);
+ if (NULL != icd_term && NULL != icd_term->dispatch.QueueEndDebugUtilsLabelEXT) {
+ icd_term->dispatch.QueueEndDebugUtilsLabelEXT(queue);
}
}
@@ -2569,7 +2755,18 @@ VKAPI_ATTR void VKAPI_CALL QueueInsertDebugUtilsLabelEXT(
const VkDebugUtilsLabelEXT* pLabelInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(queue);
if (disp->QueueInsertDebugUtilsLabelEXT != NULL) {
- disp->QueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
+ disp->QueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
+ }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_QueueInsertDebugUtilsLabelEXT(
+ VkQueue queue,
+ const VkDebugUtilsLabelEXT* pLabelInfo) {
+ uint32_t icd_index = 0;
+ struct loader_device *dev;
+ struct loader_icd_term *icd_term = loader_get_icd_and_device(queue, &dev, &icd_index);
+ if (NULL != icd_term && NULL != icd_term->dispatch.QueueInsertDebugUtilsLabelEXT) {
+ icd_term->dispatch.QueueInsertDebugUtilsLabelEXT(queue, pLabelInfo);
}
}
@@ -2578,7 +2775,18 @@ VKAPI_ATTR void VKAPI_CALL CmdBeginDebugUtilsLabelEXT(
const VkDebugUtilsLabelEXT* pLabelInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
if (disp->CmdBeginDebugUtilsLabelEXT != NULL) {
- disp->CmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+ disp->CmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+ }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_CmdBeginDebugUtilsLabelEXT(
+ VkCommandBuffer commandBuffer,
+ const VkDebugUtilsLabelEXT* pLabelInfo) {
+ uint32_t icd_index = 0;
+ struct loader_device *dev;
+ struct loader_icd_term *icd_term = loader_get_icd_and_device(commandBuffer, &dev, &icd_index);
+ if (NULL != icd_term && NULL != icd_term->dispatch.CmdBeginDebugUtilsLabelEXT) {
+ icd_term->dispatch.CmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
}
}
@@ -2586,7 +2794,17 @@ VKAPI_ATTR void VKAPI_CALL CmdEndDebugUtilsLabelEXT(
VkCommandBuffer commandBuffer) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
if (disp->CmdEndDebugUtilsLabelEXT != NULL) {
- disp->CmdEndDebugUtilsLabelEXT(commandBuffer);
+ disp->CmdEndDebugUtilsLabelEXT(commandBuffer);
+ }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_CmdEndDebugUtilsLabelEXT(
+ VkCommandBuffer commandBuffer) {
+ uint32_t icd_index = 0;
+ struct loader_device *dev;
+ struct loader_icd_term *icd_term = loader_get_icd_and_device(commandBuffer, &dev, &icd_index);
+ if (NULL != icd_term && NULL != icd_term->dispatch.CmdEndDebugUtilsLabelEXT) {
+ icd_term->dispatch.CmdEndDebugUtilsLabelEXT(commandBuffer);
}
}
@@ -2595,7 +2813,18 @@ VKAPI_ATTR void VKAPI_CALL CmdInsertDebugUtilsLabelEXT(
const VkDebugUtilsLabelEXT* pLabelInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(commandBuffer);
if (disp->CmdInsertDebugUtilsLabelEXT != NULL) {
- disp->CmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+ disp->CmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
+ }
+}
+
+VKAPI_ATTR void VKAPI_CALL terminator_CmdInsertDebugUtilsLabelEXT(
+ VkCommandBuffer commandBuffer,
+ const VkDebugUtilsLabelEXT* pLabelInfo) {
+ uint32_t icd_index = 0;
+ struct loader_device *dev;
+ struct loader_icd_term *icd_term = loader_get_icd_and_device(commandBuffer, &dev, &icd_index);
+ if (NULL != icd_term && NULL != icd_term->dispatch.CmdInsertDebugUtilsLabelEXT) {
+ icd_term->dispatch.CmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo);
}
}
@@ -3091,7 +3320,7 @@ VKAPI_ATTR VkResult VKAPI_CALL terminator_CreateImagePipeSurfaceFUCHSIA(
VKAPI_ATTR VkDeviceAddress VKAPI_CALL GetBufferDeviceAddressEXT(
VkDevice device,
- const VkBufferDeviceAddressInfoEXT* pInfo) {
+ const VkBufferDeviceAddressInfo* pInfo) {
const VkLayerDispatchTable *disp = loader_get_dispatch(device);
return disp->GetBufferDeviceAddressEXT(device, pInfo);
}
@@ -3413,6 +3642,24 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na
return true;
}
+ // ---- VK_KHR_performance_query extension commands
+ if (!strcmp("vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR", name)) {
+ *addr = (void *)EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+ return true;
+ }
+ if (!strcmp("vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR", name)) {
+ *addr = (void *)GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+ return true;
+ }
+ if (!strcmp("vkAcquireProfilingLockKHR", name)) {
+ *addr = (void *)AcquireProfilingLockKHR;
+ return true;
+ }
+ if (!strcmp("vkReleaseProfilingLockKHR", name)) {
+ *addr = (void *)ReleaseProfilingLockKHR;
+ return true;
+ }
+
// ---- VK_KHR_get_surface_capabilities2 extension commands
if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilities2KHR", name)) {
*addr = (ptr_instance->enabled_known_extensions.khr_get_surface_capabilities2 == 1)
@@ -3491,6 +3738,20 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na
return true;
}
+ // ---- VK_KHR_buffer_device_address extension commands
+ if (!strcmp("vkGetBufferDeviceAddressKHR", name)) {
+ *addr = (void *)GetBufferDeviceAddressKHR;
+ return true;
+ }
+ if (!strcmp("vkGetBufferOpaqueCaptureAddressKHR", name)) {
+ *addr = (void *)GetBufferOpaqueCaptureAddressKHR;
+ return true;
+ }
+ if (!strcmp("vkGetDeviceMemoryOpaqueCaptureAddressKHR", name)) {
+ *addr = (void *)GetDeviceMemoryOpaqueCaptureAddressKHR;
+ return true;
+ }
+
// ---- VK_KHR_pipeline_executable_properties extension commands
if (!strcmp("vkGetPipelineExecutablePropertiesKHR", name)) {
*addr = (void *)GetPipelineExecutablePropertiesKHR;
@@ -4013,6 +4274,12 @@ bool extension_instance_gpa(struct loader_instance *ptr_instance, const char *na
return true;
}
+ // ---- VK_EXT_tooling_info extension commands
+ if (!strcmp("vkGetPhysicalDeviceToolPropertiesEXT", name)) {
+ *addr = (void *)GetPhysicalDeviceToolPropertiesEXT;
+ return true;
+ }
+
// ---- VK_NV_cooperative_matrix extension commands
if (!strcmp("vkGetPhysicalDeviceCooperativeMatrixPropertiesNV", name)) {
*addr = (void *)GetPhysicalDeviceCooperativeMatrixPropertiesNV;
@@ -4173,6 +4440,18 @@ PFN_vkVoidFunction get_extension_device_proc_terminator(struct loader_device *de
addr = (PFN_vkVoidFunction)terminator_SetDebugUtilsObjectNameEXT;
} else if(!strcmp(pName, "vkSetDebugUtilsObjectTagEXT")) {
addr = (PFN_vkVoidFunction)terminator_SetDebugUtilsObjectTagEXT;
+ } else if(!strcmp(pName, "vkQueueBeginDebugUtilsLabelEXT")) {
+ addr = (PFN_vkVoidFunction)terminator_QueueBeginDebugUtilsLabelEXT;
+ } else if(!strcmp(pName, "vkQueueEndDebugUtilsLabelEXT")) {
+ addr = (PFN_vkVoidFunction)terminator_QueueEndDebugUtilsLabelEXT;
+ } else if(!strcmp(pName, "vkQueueInsertDebugUtilsLabelEXT")) {
+ addr = (PFN_vkVoidFunction)terminator_QueueInsertDebugUtilsLabelEXT;
+ } else if(!strcmp(pName, "vkCmdBeginDebugUtilsLabelEXT")) {
+ addr = (PFN_vkVoidFunction)terminator_CmdBeginDebugUtilsLabelEXT;
+ } else if(!strcmp(pName, "vkCmdEndDebugUtilsLabelEXT")) {
+ addr = (PFN_vkVoidFunction)terminator_CmdEndDebugUtilsLabelEXT;
+ } else if(!strcmp(pName, "vkCmdInsertDebugUtilsLabelEXT")) {
+ addr = (PFN_vkVoidFunction)terminator_CmdInsertDebugUtilsLabelEXT;
}
}
#ifdef VK_USE_PLATFORM_WIN32_KHR
@@ -4296,6 +4575,10 @@ const VkLayerInstanceDispatchTable instance_disp = {
// ---- VK_KHR_external_fence_capabilities extension commands
.GetPhysicalDeviceExternalFencePropertiesKHR = terminator_GetPhysicalDeviceExternalFenceProperties,
+ // ---- VK_KHR_performance_query extension commands
+ .EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = terminator_EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR,
+ .GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = terminator_GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR,
+
// ---- VK_KHR_get_surface_capabilities2 extension commands
.GetPhysicalDeviceSurfaceCapabilities2KHR = terminator_GetPhysicalDeviceSurfaceCapabilities2KHR,
.GetPhysicalDeviceSurfaceFormats2KHR = terminator_GetPhysicalDeviceSurfaceFormats2KHR,
@@ -4372,6 +4655,9 @@ const VkLayerInstanceDispatchTable instance_disp = {
.CreateMetalSurfaceEXT = terminator_CreateMetalSurfaceEXT,
#endif // VK_USE_PLATFORM_METAL_EXT
+ // ---- VK_EXT_tooling_info extension commands
+ .GetPhysicalDeviceToolPropertiesEXT = terminator_GetPhysicalDeviceToolPropertiesEXT,
+
// ---- VK_NV_cooperative_matrix extension commands
.GetPhysicalDeviceCooperativeMatrixPropertiesNV = terminator_GetPhysicalDeviceCooperativeMatrixPropertiesNV,
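
Note: with the trampolines above in place, an application reaches the new VK_KHR_performance_query device commands the usual way: extension entry points are not exported by the loader, so they are fetched through vkGetDeviceProcAddr, which hands back the trampoline that dispatches through the table wired up in this file. A hedged usage sketch, assuming a VkDevice created with VK_KHR_performance_query enabled:

#include <stdint.h>
#include <vulkan/vulkan.h>

static VkResult profile_begin(VkDevice device) {
    PFN_vkAcquireProfilingLockKHR acquire =
        (PFN_vkAcquireProfilingLockKHR)vkGetDeviceProcAddr(
            device, "vkAcquireProfilingLockKHR");
    if (acquire == NULL)
        return VK_ERROR_EXTENSION_NOT_PRESENT;

    VkAcquireProfilingLockInfoKHR info = {
        .sType = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
        .pNext = NULL,
        .flags = 0,
        .timeout = UINT64_MAX,         /* block until the lock is granted */
    };
    return acquire(device, &info);     /* pair with vkReleaseProfilingLockKHR */
}
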
diff --git a/thirdparty/vulkan/loader/vk_loader_extensions.h b/thirdparty/vulkan/loader/vk_loader_extensions.h
index b08af33838..a50f3cdd32 100644
--- a/thirdparty/vulkan/loader/vk_loader_extensions.h
+++ b/thirdparty/vulkan/loader/vk_loader_extensions.h
@@ -320,6 +320,10 @@ struct loader_icd_term_dispatch {
// ---- VK_KHR_external_fence_capabilities extension commands
PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR GetPhysicalDeviceExternalFencePropertiesKHR;
+ // ---- VK_KHR_performance_query extension commands
+ PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
+ PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR GetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR;
+
// ---- VK_KHR_get_surface_capabilities2 extension commands
PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR GetPhysicalDeviceSurfaceCapabilities2KHR;
PFN_vkGetPhysicalDeviceSurfaceFormats2KHR GetPhysicalDeviceSurfaceFormats2KHR;
@@ -382,6 +386,12 @@ struct loader_icd_term_dispatch {
// ---- VK_EXT_debug_utils extension commands
PFN_vkSetDebugUtilsObjectNameEXT SetDebugUtilsObjectNameEXT;
PFN_vkSetDebugUtilsObjectTagEXT SetDebugUtilsObjectTagEXT;
+ PFN_vkQueueBeginDebugUtilsLabelEXT QueueBeginDebugUtilsLabelEXT;
+ PFN_vkQueueEndDebugUtilsLabelEXT QueueEndDebugUtilsLabelEXT;
+ PFN_vkQueueInsertDebugUtilsLabelEXT QueueInsertDebugUtilsLabelEXT;
+ PFN_vkCmdBeginDebugUtilsLabelEXT CmdBeginDebugUtilsLabelEXT;
+ PFN_vkCmdEndDebugUtilsLabelEXT CmdEndDebugUtilsLabelEXT;
+ PFN_vkCmdInsertDebugUtilsLabelEXT CmdInsertDebugUtilsLabelEXT;
PFN_vkCreateDebugUtilsMessengerEXT CreateDebugUtilsMessengerEXT;
PFN_vkDestroyDebugUtilsMessengerEXT DestroyDebugUtilsMessengerEXT;
PFN_vkSubmitDebugUtilsMessageEXT SubmitDebugUtilsMessageEXT;
@@ -402,6 +412,9 @@ struct loader_icd_term_dispatch {
PFN_vkCreateMetalSurfaceEXT CreateMetalSurfaceEXT;
#endif // VK_USE_PLATFORM_METAL_EXT
+ // ---- VK_EXT_tooling_info extension commands
+ PFN_vkGetPhysicalDeviceToolPropertiesEXT GetPhysicalDeviceToolPropertiesEXT;
+
// ---- VK_NV_cooperative_matrix extension commands
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV GetPhysicalDeviceCooperativeMatrixPropertiesNV;
diff --git a/thirdparty/vulkan/loader/vk_loader_platform.h b/thirdparty/vulkan/loader/vk_loader_platform.h
index 2ffda55367..62e8e3ae09 100644
--- a/thirdparty/vulkan/loader/vk_loader_platform.h
+++ b/thirdparty/vulkan/loader/vk_loader_platform.h
@@ -279,31 +279,6 @@ static inline char *loader_platform_dirname(char *path) {
return path;
}
-// WIN32 runtime doesn't have basename().
-// Microsoft also doesn't have basename(). Paths are different on Windows, and
-// so this is just a temporary solution in order to get us compiling, so that we
-// can test some scenarios, and develop the correct solution for Windows.
-// TODO: Develop a better, permanent solution for Windows, to replace this
-// temporary code:
-static char *loader_platform_basename(char *pathname) {
- char *current, *next;
-
- // TODO/TBD: Do we need to deal with the Windows's ":" character?
-
- for (current = pathname; *current != '\0'; current = next) {
- next = strchr(current, DIRECTORY_SYMBOL);
- if (next == NULL) {
- // No more DIRECTORY_SYMBOL's so return p:
- return current;
- } else {
- // Point one character past the DIRECTORY_SYMBOL:
- next++;
- }
- }
- // We shouldn't get to here, but this makes the compiler happy:
- return current;
-}
-
// Dynamic Loading:
typedef HMODULE loader_platform_dl_handle;
static loader_platform_dl_handle loader_platform_open_library(const char *lib_path) {
@@ -336,15 +311,15 @@ static char *loader_platform_get_proc_address_error(const char *name) {
typedef HANDLE loader_platform_thread;
// __declspec(thread) is not supported by MinGW compiler (ignored with warning or
-// cause erorr depending on compiler switches)
+// cause error depending on compiler switches)
//
// __thread should be used instead
//
// __MINGW32__ defined for both 32 and 64 bit MinGW compilers, so it is enough to
-// detect any (32 or 64) flawor of MinGW compiler.
+// detect any (32 or 64) flavor of MinGW compiler.
//
// @note __GNUC__ could be used as a more generic way to detect _any_
-// GCC[-compitible] compiler on Windows, but this fix was tested
+// GCC[-compatible] compiler on Windows, but this fix was tested
// only with MinGW, so keep it explicit at the moment.
#if defined(__MINGW32__)
#define THREAD_LOCAL_DECL __thread
diff --git a/thirdparty/vulkan/patches/Vulkan-Loader-revert-pr260.patch b/thirdparty/vulkan/patches/Vulkan-Loader-revert-pr260.patch
new file mode 100644
index 0000000000..1267e560df
--- /dev/null
+++ b/thirdparty/vulkan/patches/Vulkan-Loader-revert-pr260.patch
@@ -0,0 +1,57 @@
+diff --git a/thirdparty/vulkan/loader/loader.c b/thirdparty/vulkan/loader/loader.c
+index 87d08d5116..c7cdb47122 100644
+--- a/thirdparty/vulkan/loader/loader.c
++++ b/thirdparty/vulkan/loader/loader.c
+@@ -7330,7 +7330,7 @@ out:
+ return result;
+ }
+
+-#if defined(_WIN32)
++#if defined(_WIN32) && defined(LOADER_DYNAMIC_LIB)
+ BOOL WINAPI DllMain(HINSTANCE hinst, DWORD reason, LPVOID reserved) {
+ switch (reason) {
+ case DLL_PROCESS_ATTACH:
+diff --git a/thirdparty/vulkan/loader/loader.h b/thirdparty/vulkan/loader/loader.h
+index 5e9495521b..56745a968d 100644
+--- a/thirdparty/vulkan/loader/loader.h
++++ b/thirdparty/vulkan/loader/loader.h
+@@ -420,6 +420,9 @@ static inline void loader_init_dispatch(void *obj, const void *data) {
+ // Global variables used across files
+ extern struct loader_struct loader;
+ extern THREAD_LOCAL_DECL struct loader_instance *tls_instance;
++#if defined(_WIN32) && !defined(LOADER_DYNAMIC_LIB)
++extern LOADER_PLATFORM_THREAD_ONCE_DEFINITION(once_init);
++#endif
+ extern loader_platform_thread_mutex loader_lock;
+ extern loader_platform_thread_mutex loader_json_lock;
+
+diff --git a/thirdparty/vulkan/loader/vk_loader_platform.h b/thirdparty/vulkan/loader/vk_loader_platform.h
+index 7824e35d6b..62e8e3ae09 100644
+--- a/thirdparty/vulkan/loader/vk_loader_platform.h
++++ b/thirdparty/vulkan/loader/vk_loader_platform.h
+@@ -330,9 +330,25 @@ typedef HANDLE loader_platform_thread;
+ // The once init functionality is not used when building a DLL on Windows. This is because there is no way to clean up the
+ // resources allocated by anything allocated by once init. This isn't a problem for static libraries, but it is for dynamic
+ // ones. When building a DLL, we use DllMain() instead to allow properly cleaning up resources.
++#if defined(LOADER_DYNAMIC_LIB)
+ #define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var)
+ #define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var)
+ #define LOADER_PLATFORM_THREAD_ONCE(ctl, func)
++#else
++#define LOADER_PLATFORM_THREAD_ONCE_DECLARATION(var) INIT_ONCE var = INIT_ONCE_STATIC_INIT;
++#define LOADER_PLATFORM_THREAD_ONCE_DEFINITION(var) INIT_ONCE var;
++#define LOADER_PLATFORM_THREAD_ONCE(ctl, func) loader_platform_thread_once_fn(ctl, func)
++static BOOL CALLBACK InitFuncWrapper(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *Context) {
++ void (*func)(void) = (void (*)(void))Parameter;
++ func();
++ return TRUE;
++}
++static void loader_platform_thread_once_fn(void *ctl, void (*func)(void)) {
++ assert(func != NULL);
++ assert(ctl != NULL);
++ InitOnceExecuteOnce((PINIT_ONCE)ctl, InitFuncWrapper, (void *)func, NULL);
++}
++#endif
+
+ // Thread IDs:
+ typedef DWORD loader_platform_thread_id;
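
The hunk above restores the Win32 one-time-initialization path for static (non-DLL) builds. A minimal standalone sketch of the same pattern, mirroring the patch's `InitFuncWrapper`; here `loader_initialize` is hypothetical, standing in for whatever function the loader must run exactly once:

/* Sketch of the once-init pattern the patch restores for static builds. */
#include <windows.h>
#include <stdio.h>

static INIT_ONCE once_init = INIT_ONCE_STATIC_INIT;

static void loader_initialize(void) {
    puts("initialized exactly once");
}

/* Adapter matching the PINIT_ONCE_FN callback shape, as in the patch. */
static BOOL CALLBACK InitFuncWrapper(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *Context) {
    void (*func)(void) = (void (*)(void))Parameter;
    func();
    return TRUE;
}

int main(void) {
    /* Safe to call from any number of threads; the wrapped function runs once. */
    InitOnceExecuteOnce(&once_init, InitFuncWrapper, (void *)loader_initialize, NULL);
    InitOnceExecuteOnce(&once_init, InitFuncWrapper, (void *)loader_initialize, NULL);
    return 0;
}
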
diff --git a/thirdparty/vulkan/vk_enum_string_helper.h b/thirdparty/vulkan/vk_enum_string_helper.h
index a0b955e32b..00c2b9d1d5 100644
--- a/thirdparty/vulkan/vk_enum_string_helper.h
+++ b/thirdparty/vulkan/vk_enum_string_helper.h
@@ -4,10 +4,10 @@
/***************************************************************************
*
- * Copyright (c) 2015-2017 The Khronos Group Inc.
- * Copyright (c) 2015-2017 Valve Corporation
- * Copyright (c) 2015-2017 LunarG, Inc.
- * Copyright (c) 2015-2017 Google Inc.
+ * Copyright (c) 2015-2020 The Khronos Group Inc.
+ * Copyright (c) 2015-2020 Valve Corporation
+ * Copyright (c) 2015-2020 LunarG, Inc.
+ * Copyright (c) 2015-2020 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -35,6 +35,7 @@
#pragma warning( disable : 4065 )
#endif
+#include <string>
#include <vulkan/vulkan.h>
@@ -53,64 +54,72 @@ static inline const char* string_VkResult(VkResult input_value)
{
switch ((VkResult)input_value)
{
- case VK_ERROR_INITIALIZATION_FAILED:
- return "VK_ERROR_INITIALIZATION_FAILED";
- case VK_ERROR_OUT_OF_DEVICE_MEMORY:
- return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
- case VK_ERROR_NOT_PERMITTED_EXT:
- return "VK_ERROR_NOT_PERMITTED_EXT";
- case VK_ERROR_INVALID_EXTERNAL_HANDLE:
- return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
- case VK_NOT_READY:
- return "VK_NOT_READY";
+ case VK_ERROR_DEVICE_LOST:
+ return "VK_ERROR_DEVICE_LOST";
+ case VK_ERROR_EXTENSION_NOT_PRESENT:
+ return "VK_ERROR_EXTENSION_NOT_PRESENT";
case VK_ERROR_FEATURE_NOT_PRESENT:
return "VK_ERROR_FEATURE_NOT_PRESENT";
- case VK_TIMEOUT:
- return "VK_TIMEOUT";
+ case VK_ERROR_FORMAT_NOT_SUPPORTED:
+ return "VK_ERROR_FORMAT_NOT_SUPPORTED";
+ case VK_ERROR_FRAGMENTATION:
+ return "VK_ERROR_FRAGMENTATION";
case VK_ERROR_FRAGMENTED_POOL:
return "VK_ERROR_FRAGMENTED_POOL";
- case VK_ERROR_LAYER_NOT_PRESENT:
- return "VK_ERROR_LAYER_NOT_PRESENT";
- case VK_ERROR_FRAGMENTATION_EXT:
- return "VK_ERROR_FRAGMENTATION_EXT";
+ case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
+ return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
- case VK_SUCCESS:
- return "VK_SUCCESS";
+ case VK_ERROR_INCOMPATIBLE_DRIVER:
+ return "VK_ERROR_INCOMPATIBLE_DRIVER";
+ case VK_ERROR_INITIALIZATION_FAILED:
+ return "VK_ERROR_INITIALIZATION_FAILED";
+ case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
+ return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
+ case VK_ERROR_INVALID_EXTERNAL_HANDLE:
+ return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
+ case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS:
+ return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS";
case VK_ERROR_INVALID_SHADER_NV:
return "VK_ERROR_INVALID_SHADER_NV";
- case VK_ERROR_FORMAT_NOT_SUPPORTED:
- return "VK_ERROR_FORMAT_NOT_SUPPORTED";
+ case VK_ERROR_LAYER_NOT_PRESENT:
+ return "VK_ERROR_LAYER_NOT_PRESENT";
+ case VK_ERROR_MEMORY_MAP_FAILED:
+ return "VK_ERROR_MEMORY_MAP_FAILED";
+ case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
+ return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
+ case VK_ERROR_NOT_PERMITTED_EXT:
+ return "VK_ERROR_NOT_PERMITTED_EXT";
+ case VK_ERROR_OUT_OF_DATE_KHR:
+ return "VK_ERROR_OUT_OF_DATE_KHR";
+ case VK_ERROR_OUT_OF_DEVICE_MEMORY:
+ return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
+ case VK_ERROR_OUT_OF_HOST_MEMORY:
+ return "VK_ERROR_OUT_OF_HOST_MEMORY";
+ case VK_ERROR_OUT_OF_POOL_MEMORY:
+ return "VK_ERROR_OUT_OF_POOL_MEMORY";
case VK_ERROR_SURFACE_LOST_KHR:
return "VK_ERROR_SURFACE_LOST_KHR";
- case VK_ERROR_VALIDATION_FAILED_EXT:
- return "VK_ERROR_VALIDATION_FAILED_EXT";
- case VK_SUBOPTIMAL_KHR:
- return "VK_SUBOPTIMAL_KHR";
case VK_ERROR_TOO_MANY_OBJECTS:
return "VK_ERROR_TOO_MANY_OBJECTS";
+ case VK_ERROR_UNKNOWN:
+ return "VK_ERROR_UNKNOWN";
+ case VK_ERROR_VALIDATION_FAILED_EXT:
+ return "VK_ERROR_VALIDATION_FAILED_EXT";
case VK_EVENT_RESET:
return "VK_EVENT_RESET";
- case VK_ERROR_OUT_OF_DATE_KHR:
- return "VK_ERROR_OUT_OF_DATE_KHR";
- case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
- return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
- case VK_ERROR_MEMORY_MAP_FAILED:
- return "VK_ERROR_MEMORY_MAP_FAILED";
case VK_EVENT_SET:
return "VK_EVENT_SET";
- case VK_ERROR_INCOMPATIBLE_DRIVER:
- return "VK_ERROR_INCOMPATIBLE_DRIVER";
case VK_INCOMPLETE:
return "VK_INCOMPLETE";
- case VK_ERROR_DEVICE_LOST:
- return "VK_ERROR_DEVICE_LOST";
- case VK_ERROR_EXTENSION_NOT_PRESENT:
- return "VK_ERROR_EXTENSION_NOT_PRESENT";
- case VK_ERROR_OUT_OF_POOL_MEMORY:
- return "VK_ERROR_OUT_OF_POOL_MEMORY";
- case VK_ERROR_OUT_OF_HOST_MEMORY:
- return "VK_ERROR_OUT_OF_HOST_MEMORY";
+ case VK_NOT_READY:
+ return "VK_NOT_READY";
+ case VK_SUBOPTIMAL_KHR:
+ return "VK_SUBOPTIMAL_KHR";
+ case VK_SUCCESS:
+ return "VK_SUCCESS";
+ case VK_TIMEOUT:
+ return "VK_TIMEOUT";
default:
return "Unhandled VkResult";
}
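
As a usage note, this regenerated helper is typically consumed for logging, e.g. turning a `VkResult` into a readable string. A minimal sketch; the `check` wrapper is hypothetical:

/* Hypothetical usage sketch: log a human-readable VkResult. */
#include <stdio.h>
#include <vulkan/vulkan.h>
#include "vk_enum_string_helper.h"

static void check(VkResult res) {
    if (res != VK_SUCCESS)
        fprintf(stderr, "Vulkan call failed: %s\n", string_VkResult(res));
}
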
@@ -120,468 +129,806 @@ static inline const char* string_VkStructureType(VkStructureType input_value)
{
switch ((VkStructureType)input_value)
{
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER:
- return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER";
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO";
+ case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV";
+ case VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV:
+ return "VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV";
+ case VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR";
+ case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
+ return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID";
+ case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID:
+ return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID";
+ case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
+ return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID";
+ case VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_APPLICATION_INFO:
+ return "VK_STRUCTURE_TYPE_APPLICATION_INFO";
+ case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2:
+ return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2";
+ case VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT:
+ return "VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT";
+ case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2:
+ return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2";
+ case VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT:
+ return "VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT";
+ case VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV:
+ return "VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV";
+ case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO:
+ return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO";
+ case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO:
+ return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO";
+ case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
+ return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO";
+ case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO:
+ return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO";
+ case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR";
+ case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO:
+ return "VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO";
+ case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO:
+ return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO";
+ case VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO:
+ return "VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO";
+ case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER:
+ return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER";
+ case VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2:
+ return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2";
+ case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT";
+ case VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV:
+ return "VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV";
+ case VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX:
+ return "VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX";
+ case VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX:
+ return "VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX";
+ case VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO:
+ return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO";
+ case VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO:
+ return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO";
+ case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT";
+ case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO:
+ return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO";
+ case VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT";
+ case VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ return "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET";
case VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR:
return "VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2";
+ case VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT";
case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT:
return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT:
+ return "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT";
case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT:
return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT";
- case VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO:
- return "VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV";
case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO:
return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO";
- case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
- return "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2";
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV";
- case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO:
- return "VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO";
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV:
- return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO:
- return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO";
- case VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2:
- return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2";
- case VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO";
- case VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES";
- case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO";
- case VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR";
- case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO:
return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO";
- case VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_APPLICATION_INFO:
- return "VK_STRUCTURE_TYPE_APPLICATION_INFO";
- case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT";
+ case VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX:
+ return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX";
+ case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX:
+ return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX";
case VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO:
return "VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
+ return "VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR:
+ return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR";
+ case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR";
+ case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
+ return "VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
+ return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO:
+ return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD:
+ return "VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD";
+ case VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2:
+ return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2";
case VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT:
return "VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT";
- case VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES:
- return "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES";
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT:
- return "VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR:
+ return "VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR";
+ case VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD:
+ return "VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD";
+ case VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR:
+ return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR";
+ case VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR:
+ return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR";
+ case VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR:
+ return "VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR";
+ case VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT";
+ case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR";
+ case VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR:
+ return "VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR";
+ case VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT:
+ return "VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT";
case VK_STRUCTURE_TYPE_EVENT_CREATE_INFO:
return "VK_STRUCTURE_TYPE_EVENT_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES";
- case VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR:
- return "VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX:
- return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD";
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
- return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
+ return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO";
+ case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV";
+ case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES";
+ case VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES";
+ case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
+ return "VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID";
+ case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES";
case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO";
- case VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN:
- return "VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN";
case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
- return "VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD";
- case VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR:
- return "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES";
- case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR";
- case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT";
- case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO";
case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV:
return "VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
- return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR";
- case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO:
- return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR";
- case VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX:
- return "VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX";
- case VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2:
- return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2";
- case VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR";
- case VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV";
- case VK_STRUCTURE_TYPE_SUBMIT_INFO:
- return "VK_STRUCTURE_TYPE_SUBMIT_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD:
- return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD";
- case VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT";
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
- return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT";
- case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV:
- return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV";
- case VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
- return "VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR";
- case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
- return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID";
- case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES";
case VK_STRUCTURE_TYPE_FENCE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_FENCE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
- return "VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO";
- case VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX:
- return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX";
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID:
- return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID";
- case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
- return "VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID";
- case VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO:
- return "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES";
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX";
- case VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX:
- return "VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX";
- case VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE:
- return "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE";
- case VK_STRUCTURE_TYPE_PRESENT_INFO_KHR:
- return "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR";
+ case VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR";
+ case VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2:
+ return "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2";
+ case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO:
+ return "VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO";
case VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO:
return "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO";
- case VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
- return "VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE";
- case VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
- return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV:
+ return "VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV";
+ case VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV:
+ return "VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV";
+ case VK_STRUCTURE_TYPE_GEOMETRY_NV:
+ return "VK_STRUCTURE_TYPE_GEOMETRY_NV";
+ case VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV:
+ return "VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV";
+ case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_HDR_METADATA_EXT:
+ return "VK_STRUCTURE_TYPE_HDR_METADATA_EXT";
+ case VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA:
+ return "VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA";
+ case VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2:
+ return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2";
+ case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER:
+ return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER";
+ case VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2:
+ return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2";
+ case VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO:
+ return "VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO";
+ case VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2:
+ return "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2";
+ case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT:
+ return "VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT";
case VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO:
return "VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT";
- case VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK:
- return "VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK";
- case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR:
- return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO";
- case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER:
- return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER";
+ case VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX:
+ return "VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX";
case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
+ return "VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID";
+ case VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR";
+ case VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR";
+ case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT";
+ case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV";
+ case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR";
+ case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX:
+ return "VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX";
+ case VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL:
+ return "VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL";
+ case VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK:
+ return "VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK";
+ case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK:
+ return "VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK";
+ case VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE:
+ return "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE";
+ case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
+ return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO";
+ case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO:
+ return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO";
+ case VK_STRUCTURE_TYPE_MEMORY_BARRIER:
+ return "VK_STRUCTURE_TYPE_MEMORY_BARRIER";
+ case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
+ return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO";
+ case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
+ return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS";
+ case VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR:
+ return "VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR";
+ case VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
+ return "VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID";
+ case VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR";
+ case VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO:
+ return "VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO";
+ case VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
+ return "VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2";
case VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR:
return "VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR";
- case VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO";
- case VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES";
- case VK_STRUCTURE_TYPE_HDR_METADATA_EXT:
- return "VK_STRUCTURE_TYPE_HDR_METADATA_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2";
- case VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES:
- return "VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES";
+ case VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX:
+ return "VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX";
+ case VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL:
+ return "VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL";
+ case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR:
+ return "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR";
+ case VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR:
+ return "VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR";
+ case VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL:
+ return "VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL";
+ case VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL:
+ return "VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL";
+ case VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR";
+ case VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL:
+ return "VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES";
- case VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2:
- return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO";
- case VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO:
- return "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO";
- case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
- return "VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO";
- case VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX:
- return "VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX";
- case VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
- return "VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES";
- case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_MEMORY_BARRIER:
- return "VK_STRUCTURE_TYPE_MEMORY_BARRIER";
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO";
- case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT:
- return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2";
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES";
- case VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO:
- return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO";
- case VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT";
- case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
- return "VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID";
- case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES:
- return "VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES";
- case VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2:
- return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2";
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV:
- return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV";
- case VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT";
- case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO:
- return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO";
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
- return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO";
- case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES";
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD:
+ return "VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD";
+ case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR:
+ return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR";
+ case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR:
+ return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR";
+ case VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR:
+ return "VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR";
+ case VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD:
+ return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD";
+ case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO";
case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO:
return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO";
- case VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT:
- return "VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES";
- case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
- return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO";
- case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR";
- case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT";
- case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
- return "VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR";
+ case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP:
+ return "VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP";
+ case VK_STRUCTURE_TYPE_PRESENT_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR";
+ case VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR:
+ return "VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR";
+ case VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE:
+ return "VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE";
+ case VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO:
+ return "VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO";
+ case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL:
+ return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL";
+ case VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV:
+ return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV";
+ case VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2:
+ return "VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2";
+ case VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT";
case VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO:
return "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
+ return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES";
+ case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
+ return "VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO";
+ case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT";
+ case VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR";
case VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR:
return "VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- return "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET";
- case VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR";
- case VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV:
- return "VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV";
- case VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO:
+ return "VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO";
+ case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO:
+ return "VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO";
+ case VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO:
+ return "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO";
+ case VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR:
+ return "VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR";
+ case VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2:
+ return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2";
+ case VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2:
+ return "VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2";
+ case VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP:
+ return "VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP";
+ case VK_STRUCTURE_TYPE_SUBMIT_INFO:
+ return "VK_STRUCTURE_TYPE_SUBMIT_INFO";
+ case VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO:
+ return "VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO";
+ case VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2:
+ return "VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2";
+ case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2:
+ return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2";
+ case VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE:
+ return "VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE";
+ case VK_STRUCTURE_TYPE_SUBPASS_END_INFO:
+ return "VK_STRUCTURE_TYPE_SUBPASS_END_INFO";
+ case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT:
+ return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT";
+ case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR:
+ return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR";
+ case VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT:
+ return "VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT";
+ case VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR:
+ return "VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR";
+ case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT";
+ case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR:
+ return "VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR";
+ case VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD:
+ return "VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD";
+ case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
+ return "VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD";
+ case VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO:
+ return "VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO";
+ case VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT:
+ return "VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT";
+ case VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT:
+ return "VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT";
+ case VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT:
+ return "VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT";
+ case VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN:
+ return "VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN";
+ case VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV:
+ return "VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV";
+ case VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR";
case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES";
- case VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK:
- return "VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK";
- case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
- return "VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID";
- case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO:
- return "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO";
- case VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR";
- case VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT:
- return "VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT";
- case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO:
- return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO";
- case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO:
- return "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO";
- case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID:
- return "VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID";
- case VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX:
- return "VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX";
- case VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2:
- return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2";
- case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
- return "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2";
- case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
- return "VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR";
- case VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2:
- return "VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2";
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV:
+ return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV";
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT:
+ return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT";
+ case VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR";
+ case VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR:
+ return "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR";
default:
return "Unhandled VkStructureType";
}
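
[Editorial note, not part of the patch.] The hunk above finishes the regenerated string_VkStructureType: the generator now emits cases in alphabetical order and covers the Vulkan 1.2 and newer extension enums, which is why the diff reads as a wholesale reshuffle rather than targeted additions. As a minimal usage sketch — the include paths and the debug_print_pnext_chain helper are assumptions for illustration only — the function is typically used to label structures while walking a pNext chain:

    /* Minimal sketch: print the sType of every node in a pNext chain.
     * debug_print_pnext_chain is a hypothetical helper; the include paths
     * are assumed, not taken from this patch. */
    #include <stdio.h>
    #include <vulkan/vulkan_core.h>
    #include "vk_enum_string_helper.h"

    static void debug_print_pnext_chain(const void *chain)
    {
        /* Every extension struct begins with VkStructureType sType followed by
         * pNext; VkBaseInStructure gives portable access to that prefix. */
        const VkBaseInStructure *node = (const VkBaseInStructure *)chain;
        while (node != NULL) {
            printf("sType = %s\n", string_VkStructureType(node->sType));
            node = node->pNext;
        }
    }

Because unknown values fall through to the "Unhandled VkStructureType" default above, such a sketch stays safe even for enum values newer than the header it was built against.
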
@@ -591,16 +938,16 @@ static inline const char* string_VkSystemAllocationScope(VkSystemAllocationScope
{
switch ((VkSystemAllocationScope)input_value)
{
- case VK_SYSTEM_ALLOCATION_SCOPE_COMMAND:
- return "VK_SYSTEM_ALLOCATION_SCOPE_COMMAND";
case VK_SYSTEM_ALLOCATION_SCOPE_CACHE:
return "VK_SYSTEM_ALLOCATION_SCOPE_CACHE";
+ case VK_SYSTEM_ALLOCATION_SCOPE_COMMAND:
+ return "VK_SYSTEM_ALLOCATION_SCOPE_COMMAND";
+ case VK_SYSTEM_ALLOCATION_SCOPE_DEVICE:
+ return "VK_SYSTEM_ALLOCATION_SCOPE_DEVICE";
case VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE:
return "VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE";
case VK_SYSTEM_ALLOCATION_SCOPE_OBJECT:
return "VK_SYSTEM_ALLOCATION_SCOPE_OBJECT";
- case VK_SYSTEM_ALLOCATION_SCOPE_DEVICE:
- return "VK_SYSTEM_ALLOCATION_SCOPE_DEVICE";
default:
return "Unhandled VkSystemAllocationScope";
}
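
[Editorial note, not part of the patch.] string_VkSystemAllocationScope pairs naturally with a custom VkAllocationCallbacks installed for debugging. A minimal sketch, under the same assumed include paths as above — logging_alloc is a hypothetical callback matching the standard PFN_vkAllocationFunction signature, and the logging and aligned_alloc choices are illustrative:

    /* Minimal sketch of a logging allocation callback; signature matches
     * PFN_vkAllocationFunction from vulkan_core.h. */
    #include <stdio.h>
    #include <stdlib.h>
    #include <vulkan/vulkan_core.h>
    #include "vk_enum_string_helper.h"

    static VKAPI_ATTR void *VKAPI_CALL logging_alloc(void *pUserData, size_t size,
                                                     size_t alignment,
                                                     VkSystemAllocationScope scope)
    {
        (void)pUserData;
        fprintf(stderr, "vkAlloc: %zu bytes, align %zu, scope %s\n",
                size, alignment, string_VkSystemAllocationScope(scope));
        /* aligned_alloc is C11 and strictly wants size to be a multiple of
         * alignment; a production allocator would round size up or use a
         * platform-specific aligned allocator instead. */
        return aligned_alloc(alignment, size);
    }

To use it, the callback would be set as pfnAllocation in a VkAllocationCallbacks struct (alongside matching reallocation and free callbacks) and passed to creation functions such as vkCreateInstance.
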
@@ -621,460 +968,488 @@ static inline const char* string_VkFormat(VkFormat input_value)
{
switch ((VkFormat)input_value)
{
- case VK_FORMAT_R32G32B32_SINT:
- return "VK_FORMAT_R32G32B32_SINT";
- case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
- return "VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM";
- case VK_FORMAT_B8G8R8A8_UINT:
- return "VK_FORMAT_B8G8R8A8_UINT";
- case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_5x5_SRGB_BLOCK";
- case VK_FORMAT_A2R10G10B10_UINT_PACK32:
- return "VK_FORMAT_A2R10G10B10_UINT_PACK32";
- case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK";
- case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_8x6_UNORM_BLOCK";
- case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
- return "VK_FORMAT_B4G4R4A4_UNORM_PACK16";
- case VK_FORMAT_R16G16_SINT:
- return "VK_FORMAT_R16G16_SINT";
- case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
- return "VK_FORMAT_BC1_RGB_SRGB_BLOCK";
- case VK_FORMAT_R8G8_USCALED:
- return "VK_FORMAT_R8G8_USCALED";
- case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_10x8_UNORM_BLOCK";
- case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
- return "VK_FORMAT_G8_B8R8_2PLANE_420_UNORM";
- case VK_FORMAT_B8G8R8A8_SNORM:
- return "VK_FORMAT_B8G8R8A8_SNORM";
- case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
- return "VK_FORMAT_B5G5R5A1_UNORM_PACK16";
- case VK_FORMAT_R64G64_UINT:
- return "VK_FORMAT_R64G64_UINT";
- case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
- return "VK_FORMAT_R5G5B5A1_UNORM_PACK16";
- case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
- return "VK_FORMAT_A2B10G10R10_UNORM_PACK32";
- case VK_FORMAT_R16G16_USCALED:
- return "VK_FORMAT_R16G16_USCALED";
- case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
- return "VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM";
- case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_8x8_UNORM_BLOCK";
- case VK_FORMAT_R8G8_SSCALED:
- return "VK_FORMAT_R8G8_SSCALED";
- case VK_FORMAT_R16G16_SSCALED:
- return "VK_FORMAT_R16G16_SSCALED";
- case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_8x5_UNORM_BLOCK";
- case VK_FORMAT_EAC_R11_UNORM_BLOCK:
- return "VK_FORMAT_EAC_R11_UNORM_BLOCK";
case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
return "VK_FORMAT_A1R5G5B5_UNORM_PACK16";
- case VK_FORMAT_R16_USCALED:
- return "VK_FORMAT_R16_USCALED";
- case VK_FORMAT_BC2_UNORM_BLOCK:
- return "VK_FORMAT_BC2_UNORM_BLOCK";
- case VK_FORMAT_R16_UNORM:
- return "VK_FORMAT_R16_UNORM";
- case VK_FORMAT_R8_USCALED:
- return "VK_FORMAT_R8_USCALED";
- case VK_FORMAT_R16G16_UNORM:
- return "VK_FORMAT_R16G16_UNORM";
- case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_10x5_UNORM_BLOCK";
- case VK_FORMAT_R16G16B16_SFLOAT:
- return "VK_FORMAT_R16G16B16_SFLOAT";
- case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
- return "VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG";
+ case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+ return "VK_FORMAT_A2B10G10R10_SINT_PACK32";
+ case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+ return "VK_FORMAT_A2B10G10R10_SNORM_PACK32";
+ case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+ return "VK_FORMAT_A2B10G10R10_SSCALED_PACK32";
+ case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+ return "VK_FORMAT_A2B10G10R10_UINT_PACK32";
+ case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+ return "VK_FORMAT_A2B10G10R10_UNORM_PACK32";
+ case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+ return "VK_FORMAT_A2B10G10R10_USCALED_PACK32";
+ case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+ return "VK_FORMAT_A2R10G10B10_SINT_PACK32";
case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
return "VK_FORMAT_A2R10G10B10_SNORM_PACK32";
- case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_10x6_SRGB_BLOCK";
- case VK_FORMAT_R8_UNORM:
- return "VK_FORMAT_R8_UNORM";
- case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
- return "VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG";
+ case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+ return "VK_FORMAT_A2R10G10B10_SSCALED_PACK32";
+ case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+ return "VK_FORMAT_A2R10G10B10_UINT_PACK32";
+ case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+ return "VK_FORMAT_A2R10G10B10_UNORM_PACK32";
+ case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+ return "VK_FORMAT_A2R10G10B10_USCALED_PACK32";
case VK_FORMAT_A8B8G8R8_SINT_PACK32:
return "VK_FORMAT_A8B8G8R8_SINT_PACK32";
- case VK_FORMAT_B8G8R8_UNORM:
- return "VK_FORMAT_B8G8R8_UNORM";
- case VK_FORMAT_R8G8_UINT:
- return "VK_FORMAT_R8G8_UINT";
- case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK";
- case VK_FORMAT_R8_SSCALED:
- return "VK_FORMAT_R8_SSCALED";
+ case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+ return "VK_FORMAT_A8B8G8R8_SNORM_PACK32";
case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
return "VK_FORMAT_A8B8G8R8_SRGB_PACK32";
- case VK_FORMAT_BC7_UNORM_BLOCK:
- return "VK_FORMAT_BC7_UNORM_BLOCK";
- case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
- return "VK_FORMAT_A2R10G10B10_SSCALED_PACK32";
- case VK_FORMAT_R16G16B16A16_SINT:
- return "VK_FORMAT_R16G16B16A16_SINT";
- case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
- return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16";
- case VK_FORMAT_B8G8R8A8_SSCALED:
- return "VK_FORMAT_B8G8R8A8_SSCALED";
- case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
- return "VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM";
- case VK_FORMAT_R8G8B8_USCALED:
- return "VK_FORMAT_R8G8B8_USCALED";
- case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
- return "VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG";
- case VK_FORMAT_B8G8R8_SRGB:
- return "VK_FORMAT_B8G8R8_SRGB";
- case VK_FORMAT_A2B10G10R10_UINT_PACK32:
- return "VK_FORMAT_A2B10G10R10_UINT_PACK32";
- case VK_FORMAT_R64G64_SINT:
- return "VK_FORMAT_R64G64_SINT";
- case VK_FORMAT_B8G8R8G8_422_UNORM:
- return "VK_FORMAT_B8G8R8G8_422_UNORM";
- case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
- return "VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM";
- case VK_FORMAT_R64_UINT:
- return "VK_FORMAT_R64_UINT";
- case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
- return "VK_FORMAT_EAC_R11G11_UNORM_BLOCK";
- case VK_FORMAT_BC5_SNORM_BLOCK:
- return "VK_FORMAT_BC5_SNORM_BLOCK";
- case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_6x5_SRGB_BLOCK";
- case VK_FORMAT_R16G16B16A16_SSCALED:
- return "VK_FORMAT_R16G16B16A16_SSCALED";
- case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
- return "VK_FORMAT_G8_B8R8_2PLANE_422_UNORM";
- case VK_FORMAT_R32G32B32_UINT:
- return "VK_FORMAT_R32G32B32_UINT";
- case VK_FORMAT_R8G8_SNORM:
- return "VK_FORMAT_R8G8_SNORM";
- case VK_FORMAT_B8G8R8_USCALED:
- return "VK_FORMAT_B8G8R8_USCALED";
- case VK_FORMAT_R16G16B16A16_SFLOAT:
- return "VK_FORMAT_R16G16B16A16_SFLOAT";
- case VK_FORMAT_R16G16B16_USCALED:
- return "VK_FORMAT_R16G16B16_USCALED";
- case VK_FORMAT_A2R10G10B10_SINT_PACK32:
- return "VK_FORMAT_A2R10G10B10_SINT_PACK32";
- case VK_FORMAT_R32_SINT:
- return "VK_FORMAT_R32_SINT";
- case VK_FORMAT_R64_SINT:
- return "VK_FORMAT_R64_SINT";
+ case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
+ return "VK_FORMAT_A8B8G8R8_SSCALED_PACK32";
+ case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+ return "VK_FORMAT_A8B8G8R8_UINT_PACK32";
+ case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+ return "VK_FORMAT_A8B8G8R8_UNORM_PACK32";
case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
return "VK_FORMAT_A8B8G8R8_USCALED_PACK32";
- case VK_FORMAT_D24_UNORM_S8_UINT:
- return "VK_FORMAT_D24_UNORM_S8_UINT";
- case VK_FORMAT_G8B8G8R8_422_UNORM:
- return "VK_FORMAT_G8B8G8R8_422_UNORM";
- case VK_FORMAT_BC4_SNORM_BLOCK:
- return "VK_FORMAT_BC4_SNORM_BLOCK";
- case VK_FORMAT_R16G16_SFLOAT:
- return "VK_FORMAT_R16G16_SFLOAT";
- case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
- return "VK_FORMAT_BC1_RGB_UNORM_BLOCK";
- case VK_FORMAT_R64_SFLOAT:
- return "VK_FORMAT_R64_SFLOAT";
- case VK_FORMAT_R64G64B64_SFLOAT:
- return "VK_FORMAT_R64G64B64_SFLOAT";
- case VK_FORMAT_BC3_SRGB_BLOCK:
- return "VK_FORMAT_BC3_SRGB_BLOCK";
- case VK_FORMAT_S8_UINT:
- return "VK_FORMAT_S8_UINT";
- case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
- return "VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG";
- case VK_FORMAT_R8G8B8_SNORM:
- return "VK_FORMAT_R8G8B8_SNORM";
- case VK_FORMAT_D32_SFLOAT:
- return "VK_FORMAT_D32_SFLOAT";
+ case VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT";
case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
return "VK_FORMAT_ASTC_10x10_SRGB_BLOCK";
- case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_4x4_SRGB_BLOCK";
- case VK_FORMAT_R12X4G12X4_UNORM_2PACK16:
- return "VK_FORMAT_R12X4G12X4_UNORM_2PACK16";
- case VK_FORMAT_G16B16G16R16_422_UNORM:
- return "VK_FORMAT_G16B16G16R16_422_UNORM";
- case VK_FORMAT_BC7_SRGB_BLOCK:
- return "VK_FORMAT_BC7_SRGB_BLOCK";
- case VK_FORMAT_R16G16_SNORM:
- return "VK_FORMAT_R16G16_SNORM";
- case VK_FORMAT_R32_UINT:
- return "VK_FORMAT_R32_UINT";
- case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
- return "VK_FORMAT_R4G4B4A4_UNORM_PACK16";
- case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
- return "VK_FORMAT_A2R10G10B10_USCALED_PACK32";
- case VK_FORMAT_R32_SFLOAT:
- return "VK_FORMAT_R32_SFLOAT";
+ case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_10x10_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT";
case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
return "VK_FORMAT_ASTC_10x5_SRGB_BLOCK";
- case VK_FORMAT_R32G32B32_SFLOAT:
- return "VK_FORMAT_R32G32B32_SFLOAT";
- case VK_FORMAT_R16_UINT:
- return "VK_FORMAT_R16_UINT";
- case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_12x12_UNORM_BLOCK";
- case VK_FORMAT_R8G8_SRGB:
- return "VK_FORMAT_R8G8_SRGB";
- case VK_FORMAT_R64G64B64A64_UINT:
- return "VK_FORMAT_R64G64B64A64_UINT";
+ case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_10x5_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_10x6_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_10x6_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_10x8_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_10x8_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT";
case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
return "VK_FORMAT_ASTC_12x10_SRGB_BLOCK";
- case VK_FORMAT_R16G16B16_SNORM:
- return "VK_FORMAT_R16G16B16_SNORM";
- case VK_FORMAT_R32G32_UINT:
- return "VK_FORMAT_R32G32_UINT";
- case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
- return "VK_FORMAT_BC1_RGBA_UNORM_BLOCK";
- case VK_FORMAT_R8G8B8_UNORM:
- return "VK_FORMAT_R8G8B8_UNORM";
- case VK_FORMAT_R8G8B8A8_SSCALED:
- return "VK_FORMAT_R8G8B8A8_SSCALED";
- case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16";
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16";
- case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
- return "VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM";
- case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
- return "VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG";
- case VK_FORMAT_R16G16B16A16_USCALED:
- return "VK_FORMAT_R16G16B16A16_USCALED";
- case VK_FORMAT_R8G8B8_SINT:
- return "VK_FORMAT_R8G8B8_SINT";
- case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
- return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16";
- case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
- return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16";
+ case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_12x10_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_12x12_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_12x12_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_4x4_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_4x4_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_5x4_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_5x4_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_5x5_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_5x5_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_6x5_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_6x5_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_6x6_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_6x6_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_8x5_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_8x5_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_8x6_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_8x6_UNORM_BLOCK";
+ case VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT:
+ return "VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT";
+ case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+ return "VK_FORMAT_ASTC_8x8_SRGB_BLOCK";
+ case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+ return "VK_FORMAT_ASTC_8x8_UNORM_BLOCK";
+ case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+ return "VK_FORMAT_B10G11R11_UFLOAT_PACK32";
+ case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
+ return "VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16";
+ case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
+ return "VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16";
case VK_FORMAT_B16G16R16G16_422_UNORM:
return "VK_FORMAT_B16G16R16G16_422_UNORM";
- case VK_FORMAT_R16G16B16_SINT:
- return "VK_FORMAT_R16G16B16_SINT";
- case VK_FORMAT_UNDEFINED:
- return "VK_FORMAT_UNDEFINED";
+ case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
+ return "VK_FORMAT_B4G4R4A4_UNORM_PACK16";
+ case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
+ return "VK_FORMAT_B5G5R5A1_UNORM_PACK16";
case VK_FORMAT_B5G6R5_UNORM_PACK16:
return "VK_FORMAT_B5G6R5_UNORM_PACK16";
- case VK_FORMAT_R8G8B8A8_SRGB:
- return "VK_FORMAT_R8G8B8A8_SRGB";
- case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
- return "VK_FORMAT_A2B10G10R10_SSCALED_PACK32";
+ case VK_FORMAT_B8G8R8A8_SINT:
+ return "VK_FORMAT_B8G8R8A8_SINT";
+ case VK_FORMAT_B8G8R8A8_SNORM:
+ return "VK_FORMAT_B8G8R8A8_SNORM";
+ case VK_FORMAT_B8G8R8A8_SRGB:
+ return "VK_FORMAT_B8G8R8A8_SRGB";
+ case VK_FORMAT_B8G8R8A8_SSCALED:
+ return "VK_FORMAT_B8G8R8A8_SSCALED";
+ case VK_FORMAT_B8G8R8A8_UINT:
+ return "VK_FORMAT_B8G8R8A8_UINT";
+ case VK_FORMAT_B8G8R8A8_UNORM:
+ return "VK_FORMAT_B8G8R8A8_UNORM";
+ case VK_FORMAT_B8G8R8A8_USCALED:
+ return "VK_FORMAT_B8G8R8A8_USCALED";
+ case VK_FORMAT_B8G8R8G8_422_UNORM:
+ return "VK_FORMAT_B8G8R8G8_422_UNORM";
case VK_FORMAT_B8G8R8_SINT:
return "VK_FORMAT_B8G8R8_SINT";
- case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
- return "VK_FORMAT_B10G11R11_UFLOAT_PACK32";
+ case VK_FORMAT_B8G8R8_SNORM:
+ return "VK_FORMAT_B8G8R8_SNORM";
+ case VK_FORMAT_B8G8R8_SRGB:
+ return "VK_FORMAT_B8G8R8_SRGB";
+ case VK_FORMAT_B8G8R8_SSCALED:
+ return "VK_FORMAT_B8G8R8_SSCALED";
+ case VK_FORMAT_B8G8R8_UINT:
+ return "VK_FORMAT_B8G8R8_UINT";
+ case VK_FORMAT_B8G8R8_UNORM:
+ return "VK_FORMAT_B8G8R8_UNORM";
+ case VK_FORMAT_B8G8R8_USCALED:
+ return "VK_FORMAT_B8G8R8_USCALED";
+ case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+ return "VK_FORMAT_BC1_RGBA_SRGB_BLOCK";
+ case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+ return "VK_FORMAT_BC1_RGBA_UNORM_BLOCK";
+ case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+ return "VK_FORMAT_BC1_RGB_SRGB_BLOCK";
+ case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+ return "VK_FORMAT_BC1_RGB_UNORM_BLOCK";
+ case VK_FORMAT_BC2_SRGB_BLOCK:
+ return "VK_FORMAT_BC2_SRGB_BLOCK";
+ case VK_FORMAT_BC2_UNORM_BLOCK:
+ return "VK_FORMAT_BC2_UNORM_BLOCK";
+ case VK_FORMAT_BC3_SRGB_BLOCK:
+ return "VK_FORMAT_BC3_SRGB_BLOCK";
+ case VK_FORMAT_BC3_UNORM_BLOCK:
+ return "VK_FORMAT_BC3_UNORM_BLOCK";
+ case VK_FORMAT_BC4_SNORM_BLOCK:
+ return "VK_FORMAT_BC4_SNORM_BLOCK";
+ case VK_FORMAT_BC4_UNORM_BLOCK:
+ return "VK_FORMAT_BC4_UNORM_BLOCK";
+ case VK_FORMAT_BC5_SNORM_BLOCK:
+ return "VK_FORMAT_BC5_SNORM_BLOCK";
case VK_FORMAT_BC5_UNORM_BLOCK:
return "VK_FORMAT_BC5_UNORM_BLOCK";
- case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_5x4_SRGB_BLOCK";
- case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_5x4_UNORM_BLOCK";
- case VK_FORMAT_R8G8B8A8_SINT:
- return "VK_FORMAT_R8G8B8A8_SINT";
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16";
- case VK_FORMAT_R8G8B8A8_UNORM:
- return "VK_FORMAT_R8G8B8A8_UNORM";
- case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
- return "VK_FORMAT_G16_B16R16_2PLANE_420_UNORM";
- case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
- return "VK_FORMAT_G16_B16R16_2PLANE_422_UNORM";
+ case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+ return "VK_FORMAT_BC6H_SFLOAT_BLOCK";
+ case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+ return "VK_FORMAT_BC6H_UFLOAT_BLOCK";
+ case VK_FORMAT_BC7_SRGB_BLOCK:
+ return "VK_FORMAT_BC7_SRGB_BLOCK";
+ case VK_FORMAT_BC7_UNORM_BLOCK:
+ return "VK_FORMAT_BC7_UNORM_BLOCK";
+ case VK_FORMAT_D16_UNORM:
+ return "VK_FORMAT_D16_UNORM";
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ return "VK_FORMAT_D16_UNORM_S8_UINT";
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ return "VK_FORMAT_D24_UNORM_S8_UINT";
+ case VK_FORMAT_D32_SFLOAT:
+ return "VK_FORMAT_D32_SFLOAT";
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ return "VK_FORMAT_D32_SFLOAT_S8_UINT";
+ case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+ return "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32";
case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
return "VK_FORMAT_EAC_R11G11_SNORM_BLOCK";
- case VK_FORMAT_R8G8_UNORM:
- return "VK_FORMAT_R8G8_UNORM";
- case VK_FORMAT_A2B10G10R10_SINT_PACK32:
- return "VK_FORMAT_A2B10G10R10_SINT_PACK32";
- case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_4x4_UNORM_BLOCK";
+ case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+ return "VK_FORMAT_EAC_R11G11_UNORM_BLOCK";
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+ return "VK_FORMAT_EAC_R11_SNORM_BLOCK";
+ case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+ return "VK_FORMAT_EAC_R11_UNORM_BLOCK";
+ case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+ return "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK";
+ case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+ return "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK";
+ case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+ return "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK";
+ case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+ return "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK";
+ case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+ return "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK";
+ case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+ return "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK";
+ case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
+ return "VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16";
+ case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
+ return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16";
case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
return "VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16";
- case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16";
- case VK_FORMAT_R16_SINT:
- return "VK_FORMAT_R16_SINT";
- case VK_FORMAT_R8G8B8_SRGB:
- return "VK_FORMAT_R8G8B8_SRGB";
- case VK_FORMAT_B8G8R8_SNORM:
- return "VK_FORMAT_B8G8R8_SNORM";
- case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_12x12_SRGB_BLOCK";
- case VK_FORMAT_BC2_SRGB_BLOCK:
- return "VK_FORMAT_BC2_SRGB_BLOCK";
- case VK_FORMAT_R10X6_UNORM_PACK16:
- return "VK_FORMAT_R10X6_UNORM_PACK16";
- case VK_FORMAT_R64G64_SFLOAT:
- return "VK_FORMAT_R64G64_SFLOAT";
- case VK_FORMAT_R4G4_UNORM_PACK8:
- return "VK_FORMAT_R4G4_UNORM_PACK8";
- case VK_FORMAT_R16_SSCALED:
- return "VK_FORMAT_R16_SSCALED";
case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16";
- case VK_FORMAT_R32G32B32A32_SINT:
- return "VK_FORMAT_R32G32B32A32_SINT";
- case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK";
+ case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
+ return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16";
+ case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
+ return "VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16";
+ case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
+ return "VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16";
+ case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
+ return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16";
+ case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
+ return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16";
+ case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
+ return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16";
+ case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
+ return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16";
+ case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
+ return "VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16";
+ case VK_FORMAT_G16B16G16R16_422_UNORM:
+ return "VK_FORMAT_G16B16G16R16_422_UNORM";
+ case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
+ return "VK_FORMAT_G16_B16R16_2PLANE_420_UNORM";
+ case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
+ return "VK_FORMAT_G16_B16R16_2PLANE_422_UNORM";
+ case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
+ return "VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM";
+ case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
+ return "VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM";
+ case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
+ return "VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM";
+ case VK_FORMAT_G8B8G8R8_422_UNORM:
+ return "VK_FORMAT_G8B8G8R8_422_UNORM";
+ case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+ return "VK_FORMAT_G8_B8R8_2PLANE_420_UNORM";
+ case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
+ return "VK_FORMAT_G8_B8R8_2PLANE_422_UNORM";
+ case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
+ return "VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM";
+ case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
+ return "VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM";
+ case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
+ return "VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM";
case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
return "VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG";
- case VK_FORMAT_R8G8B8_UINT:
- return "VK_FORMAT_R8G8B8_UINT";
- case VK_FORMAT_R16G16B16_UNORM:
- return "VK_FORMAT_R16G16B16_UNORM";
- case VK_FORMAT_R16G16B16_UINT:
- return "VK_FORMAT_R16G16B16_UINT";
- case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
- return "VK_FORMAT_A8B8G8R8_UNORM_PACK32";
- case VK_FORMAT_B8G8R8_SSCALED:
- return "VK_FORMAT_B8G8R8_SSCALED";
- case VK_FORMAT_X8_D24_UNORM_PACK32:
- return "VK_FORMAT_X8_D24_UNORM_PACK32";
- case VK_FORMAT_R32G32_SFLOAT:
- return "VK_FORMAT_R32G32_SFLOAT";
- case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
- return "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32";
- case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_6x6_SRGB_BLOCK";
+ case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
+ return "VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG";
+ case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
+ return "VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG";
+ case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
+ return "VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG";
+ case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
+ return "VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG";
+ case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
+ return "VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG";
+ case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
+ return "VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG";
case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
return "VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG";
- case VK_FORMAT_R16G16B16A16_UINT:
- return "VK_FORMAT_R16G16B16A16_UINT";
- case VK_FORMAT_R8G8B8A8_USCALED:
- return "VK_FORMAT_R8G8B8A8_USCALED";
- case VK_FORMAT_R16G16B16A16_SNORM:
- return "VK_FORMAT_R16G16B16A16_SNORM";
- case VK_FORMAT_R16G16B16A16_UNORM:
- return "VK_FORMAT_R16G16B16A16_UNORM";
- case VK_FORMAT_D16_UNORM:
- return "VK_FORMAT_D16_UNORM";
- case VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16:
- return "VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16";
- case VK_FORMAT_BC3_UNORM_BLOCK:
- return "VK_FORMAT_BC3_UNORM_BLOCK";
- case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
- return "VK_FORMAT_A2B10G10R10_USCALED_PACK32";
- case VK_FORMAT_R8_SRGB:
- return "VK_FORMAT_R8_SRGB";
- case VK_FORMAT_R32G32B32A32_SFLOAT:
- return "VK_FORMAT_R32G32B32A32_SFLOAT";
- case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
- return "VK_FORMAT_A2R10G10B10_UNORM_PACK32";
- case VK_FORMAT_R8G8_SINT:
- return "VK_FORMAT_R8G8_SINT";
case VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16:
return "VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16";
- case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
- return "VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16";
- case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
- return "VK_FORMAT_A2B10G10R10_SNORM_PACK32";
- case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
- return "VK_FORMAT_BC1_RGBA_SRGB_BLOCK";
- case VK_FORMAT_D32_SFLOAT_S8_UINT:
- return "VK_FORMAT_D32_SFLOAT_S8_UINT";
- case VK_FORMAT_B8G8R8A8_USCALED:
- return "VK_FORMAT_B8G8R8A8_USCALED";
- case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_6x6_UNORM_BLOCK";
- case VK_FORMAT_R5G6B5_UNORM_PACK16:
- return "VK_FORMAT_R5G6B5_UNORM_PACK16";
- case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK";
+ case VK_FORMAT_R10X6G10X6_UNORM_2PACK16:
+ return "VK_FORMAT_R10X6G10X6_UNORM_2PACK16";
+ case VK_FORMAT_R10X6_UNORM_PACK16:
+ return "VK_FORMAT_R10X6_UNORM_PACK16";
case VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16:
return "VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16";
- case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
- return "VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG";
- case VK_FORMAT_R8G8B8A8_SNORM:
- return "VK_FORMAT_R8G8B8A8_SNORM";
- case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_10x10_UNORM_BLOCK";
- case VK_FORMAT_BC6H_SFLOAT_BLOCK:
- return "VK_FORMAT_BC6H_SFLOAT_BLOCK";
+ case VK_FORMAT_R12X4G12X4_UNORM_2PACK16:
+ return "VK_FORMAT_R12X4G12X4_UNORM_2PACK16";
+ case VK_FORMAT_R12X4_UNORM_PACK16:
+ return "VK_FORMAT_R12X4_UNORM_PACK16";
+ case VK_FORMAT_R16G16B16A16_SFLOAT:
+ return "VK_FORMAT_R16G16B16A16_SFLOAT";
+ case VK_FORMAT_R16G16B16A16_SINT:
+ return "VK_FORMAT_R16G16B16A16_SINT";
+ case VK_FORMAT_R16G16B16A16_SNORM:
+ return "VK_FORMAT_R16G16B16A16_SNORM";
+ case VK_FORMAT_R16G16B16A16_SSCALED:
+ return "VK_FORMAT_R16G16B16A16_SSCALED";
+ case VK_FORMAT_R16G16B16A16_UINT:
+ return "VK_FORMAT_R16G16B16A16_UINT";
+ case VK_FORMAT_R16G16B16A16_UNORM:
+ return "VK_FORMAT_R16G16B16A16_UNORM";
+ case VK_FORMAT_R16G16B16A16_USCALED:
+ return "VK_FORMAT_R16G16B16A16_USCALED";
+ case VK_FORMAT_R16G16B16_SFLOAT:
+ return "VK_FORMAT_R16G16B16_SFLOAT";
+ case VK_FORMAT_R16G16B16_SINT:
+ return "VK_FORMAT_R16G16B16_SINT";
+ case VK_FORMAT_R16G16B16_SNORM:
+ return "VK_FORMAT_R16G16B16_SNORM";
+ case VK_FORMAT_R16G16B16_SSCALED:
+ return "VK_FORMAT_R16G16B16_SSCALED";
+ case VK_FORMAT_R16G16B16_UINT:
+ return "VK_FORMAT_R16G16B16_UINT";
+ case VK_FORMAT_R16G16B16_UNORM:
+ return "VK_FORMAT_R16G16B16_UNORM";
+ case VK_FORMAT_R16G16B16_USCALED:
+ return "VK_FORMAT_R16G16B16_USCALED";
+ case VK_FORMAT_R16G16_SFLOAT:
+ return "VK_FORMAT_R16G16_SFLOAT";
+ case VK_FORMAT_R16G16_SINT:
+ return "VK_FORMAT_R16G16_SINT";
+ case VK_FORMAT_R16G16_SNORM:
+ return "VK_FORMAT_R16G16_SNORM";
+ case VK_FORMAT_R16G16_SSCALED:
+ return "VK_FORMAT_R16G16_SSCALED";
+ case VK_FORMAT_R16G16_UINT:
+ return "VK_FORMAT_R16G16_UINT";
+ case VK_FORMAT_R16G16_UNORM:
+ return "VK_FORMAT_R16G16_UNORM";
+ case VK_FORMAT_R16G16_USCALED:
+ return "VK_FORMAT_R16G16_USCALED";
case VK_FORMAT_R16_SFLOAT:
return "VK_FORMAT_R16_SFLOAT";
- case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
- return "VK_FORMAT_A8B8G8R8_SSCALED_PACK32";
- case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_10x8_SRGB_BLOCK";
- case VK_FORMAT_B8G8R8A8_SINT:
- return "VK_FORMAT_B8G8R8A8_SINT";
- case VK_FORMAT_R8_SNORM:
- return "VK_FORMAT_R8_SNORM";
- case VK_FORMAT_R32G32_SINT:
- return "VK_FORMAT_R32G32_SINT";
+ case VK_FORMAT_R16_SINT:
+ return "VK_FORMAT_R16_SINT";
+ case VK_FORMAT_R16_SNORM:
+ return "VK_FORMAT_R16_SNORM";
+ case VK_FORMAT_R16_SSCALED:
+ return "VK_FORMAT_R16_SSCALED";
+ case VK_FORMAT_R16_UINT:
+ return "VK_FORMAT_R16_UINT";
+ case VK_FORMAT_R16_UNORM:
+ return "VK_FORMAT_R16_UNORM";
+ case VK_FORMAT_R16_USCALED:
+ return "VK_FORMAT_R16_USCALED";
+ case VK_FORMAT_R32G32B32A32_SFLOAT:
+ return "VK_FORMAT_R32G32B32A32_SFLOAT";
+ case VK_FORMAT_R32G32B32A32_SINT:
+ return "VK_FORMAT_R32G32B32A32_SINT";
case VK_FORMAT_R32G32B32A32_UINT:
return "VK_FORMAT_R32G32B32A32_UINT";
- case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
- return "VK_FORMAT_A8B8G8R8_SNORM_PACK32";
- case VK_FORMAT_A8B8G8R8_UINT_PACK32:
- return "VK_FORMAT_A8B8G8R8_UINT_PACK32";
- case VK_FORMAT_BC4_UNORM_BLOCK:
- return "VK_FORMAT_BC4_UNORM_BLOCK";
- case VK_FORMAT_B8G8R8_UINT:
- return "VK_FORMAT_B8G8R8_UINT";
- case VK_FORMAT_D16_UNORM_S8_UINT:
- return "VK_FORMAT_D16_UNORM_S8_UINT";
- case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK";
- case VK_FORMAT_R8G8B8A8_UINT:
- return "VK_FORMAT_R8G8B8A8_UINT";
- case VK_FORMAT_R12X4_UNORM_PACK16:
- return "VK_FORMAT_R12X4_UNORM_PACK16";
+ case VK_FORMAT_R32G32B32_SFLOAT:
+ return "VK_FORMAT_R32G32B32_SFLOAT";
+ case VK_FORMAT_R32G32B32_SINT:
+ return "VK_FORMAT_R32G32B32_SINT";
+ case VK_FORMAT_R32G32B32_UINT:
+ return "VK_FORMAT_R32G32B32_UINT";
+ case VK_FORMAT_R32G32_SFLOAT:
+ return "VK_FORMAT_R32G32_SFLOAT";
+ case VK_FORMAT_R32G32_SINT:
+ return "VK_FORMAT_R32G32_SINT";
+ case VK_FORMAT_R32G32_UINT:
+ return "VK_FORMAT_R32G32_UINT";
+ case VK_FORMAT_R32_SFLOAT:
+ return "VK_FORMAT_R32_SFLOAT";
+ case VK_FORMAT_R32_SINT:
+ return "VK_FORMAT_R32_SINT";
+ case VK_FORMAT_R32_UINT:
+ return "VK_FORMAT_R32_UINT";
+ case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+ return "VK_FORMAT_R4G4B4A4_UNORM_PACK16";
+ case VK_FORMAT_R4G4_UNORM_PACK8:
+ return "VK_FORMAT_R4G4_UNORM_PACK8";
+ case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+ return "VK_FORMAT_R5G5B5A1_UNORM_PACK16";
+ case VK_FORMAT_R5G6B5_UNORM_PACK16:
+ return "VK_FORMAT_R5G6B5_UNORM_PACK16";
+ case VK_FORMAT_R64G64B64A64_SFLOAT:
+ return "VK_FORMAT_R64G64B64A64_SFLOAT";
+ case VK_FORMAT_R64G64B64A64_SINT:
+ return "VK_FORMAT_R64G64B64A64_SINT";
+ case VK_FORMAT_R64G64B64A64_UINT:
+ return "VK_FORMAT_R64G64B64A64_UINT";
+ case VK_FORMAT_R64G64B64_SFLOAT:
+ return "VK_FORMAT_R64G64B64_SFLOAT";
case VK_FORMAT_R64G64B64_SINT:
return "VK_FORMAT_R64G64B64_SINT";
- case VK_FORMAT_EAC_R11_SNORM_BLOCK:
- return "VK_FORMAT_EAC_R11_SNORM_BLOCK";
case VK_FORMAT_R64G64B64_UINT:
return "VK_FORMAT_R64G64B64_UINT";
- case VK_FORMAT_R64G64B64A64_SINT:
- return "VK_FORMAT_R64G64B64A64_SINT";
- case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
- return "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK";
- case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_6x5_UNORM_BLOCK";
- case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_8x5_SRGB_BLOCK";
- case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_12x10_UNORM_BLOCK";
- case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_8x6_SRGB_BLOCK";
+ case VK_FORMAT_R64G64_SFLOAT:
+ return "VK_FORMAT_R64G64_SFLOAT";
+ case VK_FORMAT_R64G64_SINT:
+ return "VK_FORMAT_R64G64_SINT";
+ case VK_FORMAT_R64G64_UINT:
+ return "VK_FORMAT_R64G64_UINT";
+ case VK_FORMAT_R64_SFLOAT:
+ return "VK_FORMAT_R64_SFLOAT";
+ case VK_FORMAT_R64_SINT:
+ return "VK_FORMAT_R64_SINT";
+ case VK_FORMAT_R64_UINT:
+ return "VK_FORMAT_R64_UINT";
+ case VK_FORMAT_R8G8B8A8_SINT:
+ return "VK_FORMAT_R8G8B8A8_SINT";
+ case VK_FORMAT_R8G8B8A8_SNORM:
+ return "VK_FORMAT_R8G8B8A8_SNORM";
+ case VK_FORMAT_R8G8B8A8_SRGB:
+ return "VK_FORMAT_R8G8B8A8_SRGB";
+ case VK_FORMAT_R8G8B8A8_SSCALED:
+ return "VK_FORMAT_R8G8B8A8_SSCALED";
+ case VK_FORMAT_R8G8B8A8_UINT:
+ return "VK_FORMAT_R8G8B8A8_UINT";
+ case VK_FORMAT_R8G8B8A8_UNORM:
+ return "VK_FORMAT_R8G8B8A8_UNORM";
+ case VK_FORMAT_R8G8B8A8_USCALED:
+ return "VK_FORMAT_R8G8B8A8_USCALED";
+ case VK_FORMAT_R8G8B8_SINT:
+ return "VK_FORMAT_R8G8B8_SINT";
+ case VK_FORMAT_R8G8B8_SNORM:
+ return "VK_FORMAT_R8G8B8_SNORM";
+ case VK_FORMAT_R8G8B8_SRGB:
+ return "VK_FORMAT_R8G8B8_SRGB";
case VK_FORMAT_R8G8B8_SSCALED:
return "VK_FORMAT_R8G8B8_SSCALED";
- case VK_FORMAT_B8G8R8A8_UNORM:
- return "VK_FORMAT_B8G8R8A8_UNORM";
- case VK_FORMAT_R16_SNORM:
- return "VK_FORMAT_R16_SNORM";
- case VK_FORMAT_R8_UINT:
- return "VK_FORMAT_R8_UINT";
- case VK_FORMAT_R64G64B64A64_SFLOAT:
- return "VK_FORMAT_R64G64B64A64_SFLOAT";
- case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_5x5_UNORM_BLOCK";
- case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
- return "VK_FORMAT_ASTC_8x8_SRGB_BLOCK";
+ case VK_FORMAT_R8G8B8_UINT:
+ return "VK_FORMAT_R8G8B8_UINT";
+ case VK_FORMAT_R8G8B8_UNORM:
+ return "VK_FORMAT_R8G8B8_UNORM";
+ case VK_FORMAT_R8G8B8_USCALED:
+ return "VK_FORMAT_R8G8B8_USCALED";
+ case VK_FORMAT_R8G8_SINT:
+ return "VK_FORMAT_R8G8_SINT";
+ case VK_FORMAT_R8G8_SNORM:
+ return "VK_FORMAT_R8G8_SNORM";
+ case VK_FORMAT_R8G8_SRGB:
+ return "VK_FORMAT_R8G8_SRGB";
+ case VK_FORMAT_R8G8_SSCALED:
+ return "VK_FORMAT_R8G8_SSCALED";
+ case VK_FORMAT_R8G8_UINT:
+ return "VK_FORMAT_R8G8_UINT";
+ case VK_FORMAT_R8G8_UNORM:
+ return "VK_FORMAT_R8G8_UNORM";
+ case VK_FORMAT_R8G8_USCALED:
+ return "VK_FORMAT_R8G8_USCALED";
case VK_FORMAT_R8_SINT:
return "VK_FORMAT_R8_SINT";
- case VK_FORMAT_B8G8R8A8_SRGB:
- return "VK_FORMAT_B8G8R8A8_SRGB";
- case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
- return "VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16";
- case VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16:
- return "VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16";
- case VK_FORMAT_BC6H_UFLOAT_BLOCK:
- return "VK_FORMAT_BC6H_UFLOAT_BLOCK";
- case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
- return "VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM";
- case VK_FORMAT_R10X6G10X6_UNORM_2PACK16:
- return "VK_FORMAT_R10X6G10X6_UNORM_2PACK16";
- case VK_FORMAT_R16G16_UINT:
- return "VK_FORMAT_R16G16_UINT";
- case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
- return "VK_FORMAT_ASTC_10x6_UNORM_BLOCK";
- case VK_FORMAT_R16G16B16_SSCALED:
- return "VK_FORMAT_R16G16B16_SSCALED";
- case VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16:
- return "VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16";
+ case VK_FORMAT_R8_SNORM:
+ return "VK_FORMAT_R8_SNORM";
+ case VK_FORMAT_R8_SRGB:
+ return "VK_FORMAT_R8_SRGB";
+ case VK_FORMAT_R8_SSCALED:
+ return "VK_FORMAT_R8_SSCALED";
+ case VK_FORMAT_R8_UINT:
+ return "VK_FORMAT_R8_UINT";
+ case VK_FORMAT_R8_UNORM:
+ return "VK_FORMAT_R8_UNORM";
+ case VK_FORMAT_R8_USCALED:
+ return "VK_FORMAT_R8_USCALED";
+ case VK_FORMAT_S8_UINT:
+ return "VK_FORMAT_S8_UINT";
+ case VK_FORMAT_UNDEFINED:
+ return "VK_FORMAT_UNDEFINED";
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ return "VK_FORMAT_X8_D24_UNORM_PACK32";
default:
return "Unhandled VkFormat";
}
@@ -1084,67 +1459,85 @@ static inline const char* string_VkFormatFeatureFlagBits(VkFormatFeatureFlagBits
{
switch ((VkFormatFeatureFlagBits)input_value)
{
+ case VK_FORMAT_FEATURE_BLIT_DST_BIT:
+ return "VK_FORMAT_FEATURE_BLIT_DST_BIT";
+ case VK_FORMAT_FEATURE_BLIT_SRC_BIT:
+ return "VK_FORMAT_FEATURE_BLIT_SRC_BIT";
+ case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT:
+ return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT";
case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT:
return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT";
- case VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT:
- return "VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT";
- case VK_FORMAT_FEATURE_TRANSFER_DST_BIT:
- return "VK_FORMAT_FEATURE_TRANSFER_DST_BIT";
case VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT:
return "VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT";
case VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT:
return "VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT";
- case VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT:
- return "VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT";
- case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT:
- return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT";
case VK_FORMAT_FEATURE_DISJOINT_BIT:
return "VK_FORMAT_FEATURE_DISJOINT_BIT";
+ case VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT:
+ return "VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT";
+ case VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT:
+ return "VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT";
+ case VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT:
+ return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT";
+ case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG:
+ return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG";
+ case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT:
+ return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT";
+ case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT:
+ return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT";
case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT:
return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT";
- case VK_FORMAT_FEATURE_TRANSFER_SRC_BIT:
- return "VK_FORMAT_FEATURE_TRANSFER_SRC_BIT";
+ case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT:
+ return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT";
+ case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT:
+ return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT";
+ case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT:
+ return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT";
case VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT:
return "VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG";
case VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT:
return "VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT";
- case VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT:
- return "VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT";
- case VK_FORMAT_FEATURE_BLIT_DST_BIT:
- return "VK_FORMAT_FEATURE_BLIT_DST_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT";
- case VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT:
- return "VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT";
+ case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT:
+ return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT";
case VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT:
return "VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT";
- case VK_FORMAT_FEATURE_BLIT_SRC_BIT:
- return "VK_FORMAT_FEATURE_BLIT_SRC_BIT";
- case VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT:
- return "VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT";
+ case VK_FORMAT_FEATURE_TRANSFER_DST_BIT:
+ return "VK_FORMAT_FEATURE_TRANSFER_DST_BIT";
+ case VK_FORMAT_FEATURE_TRANSFER_SRC_BIT:
+ return "VK_FORMAT_FEATURE_TRANSFER_SRC_BIT";
+ case VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT:
+ return "VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT";
+ case VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT:
+ return "VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT";
default:
return "Unhandled VkFormatFeatureFlagBits";
}
}
+static inline std::string string_VkFormatFeatureFlags(VkFormatFeatureFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkFormatFeatureFlagBits(static_cast<VkFormatFeatureFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkFormatFeatureFlagBits(static_cast<VkFormatFeatureFlagBits>(0)));
+ return ret;
+}
+
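[editor's note] The hunks from this point on pair each existing string_Vk*FlagBits switch with a new string_Vk*Flags helper that decomposes a multi-bit mask: it walks the mask from the least significant bit upward, reconstructs each set bit as a FlagBits value, and joins the names with "|". A minimal usage sketch, assuming this generated header is available under the conventional name vk_enum_string_helper.h (the file name is not shown in this hunk):

    #include <iostream>
    #include <vulkan/vulkan.h>
    #include "vk_enum_string_helper.h"  // assumed name of the generated header being diffed

    int main() {
        // Bits are emitted LSB-first, so BLIT_SRC (0x400) precedes BLIT_DST (0x800).
        VkFormatFeatureFlags flags =
            VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT;
        std::cout << string_VkFormatFeatureFlags(flags) << "\n";
        // Prints: VK_FORMAT_FEATURE_BLIT_SRC_BIT|VK_FORMAT_FEATURE_BLIT_DST_BIT
        return 0;
    }

Note that the per-bit functions can return const char* string literals, while the combined decomposers return std::string because the result is a concatenation.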
static inline const char* string_VkImageType(VkImageType input_value)
{
switch ((VkImageType)input_value)
{
- case VK_IMAGE_TYPE_2D:
- return "VK_IMAGE_TYPE_2D";
case VK_IMAGE_TYPE_1D:
return "VK_IMAGE_TYPE_1D";
+ case VK_IMAGE_TYPE_2D:
+ return "VK_IMAGE_TYPE_2D";
case VK_IMAGE_TYPE_3D:
return "VK_IMAGE_TYPE_3D";
default:
@@ -1156,10 +1549,12 @@ static inline const char* string_VkImageTiling(VkImageTiling input_value)
{
switch ((VkImageTiling)input_value)
{
- case VK_IMAGE_TILING_OPTIMAL:
- return "VK_IMAGE_TILING_OPTIMAL";
+ case VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT:
+ return "VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT";
case VK_IMAGE_TILING_LINEAR:
return "VK_IMAGE_TILING_LINEAR";
+ case VK_IMAGE_TILING_OPTIMAL:
+ return "VK_IMAGE_TILING_OPTIMAL";
default:
return "Unhandled VkImageTiling";
}
@@ -1169,78 +1564,118 @@ static inline const char* string_VkImageUsageFlagBits(VkImageUsageFlagBits input
{
switch ((VkImageUsageFlagBits)input_value)
{
- case VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT:
- return "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
- case VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT:
- return "VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT";
+ case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT:
+ return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT";
case VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT:
return "VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT";
+ case VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT:
+ return "VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT";
+ case VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT:
+ return "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
case VK_IMAGE_USAGE_SAMPLED_BIT:
return "VK_IMAGE_USAGE_SAMPLED_BIT";
- case VK_IMAGE_USAGE_TRANSFER_DST_BIT:
- return "VK_IMAGE_USAGE_TRANSFER_DST_BIT";
+ case VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV:
+ return "VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV";
case VK_IMAGE_USAGE_STORAGE_BIT:
return "VK_IMAGE_USAGE_STORAGE_BIT";
+ case VK_IMAGE_USAGE_TRANSFER_DST_BIT:
+ return "VK_IMAGE_USAGE_TRANSFER_DST_BIT";
case VK_IMAGE_USAGE_TRANSFER_SRC_BIT:
return "VK_IMAGE_USAGE_TRANSFER_SRC_BIT";
- case VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT:
- return "VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT";
+ case VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT:
+ return "VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT";
default:
return "Unhandled VkImageUsageFlagBits";
}
}
+static inline std::string string_VkImageUsageFlags(VkImageUsageFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkImageUsageFlagBits(static_cast<VkImageUsageFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkImageUsageFlagBits(static_cast<VkImageUsageFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkImageCreateFlagBits(VkImageCreateFlagBits input_value)
{
switch ((VkImageCreateFlagBits)input_value)
{
+ case VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT:
+ return "VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT";
case VK_IMAGE_CREATE_ALIAS_BIT:
return "VK_IMAGE_CREATE_ALIAS_BIT";
- case VK_IMAGE_CREATE_PROTECTED_BIT:
- return "VK_IMAGE_CREATE_PROTECTED_BIT";
case VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT:
return "VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT";
- case VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT:
- return "VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT";
- case VK_IMAGE_CREATE_EXTENDED_USAGE_BIT:
- return "VK_IMAGE_CREATE_EXTENDED_USAGE_BIT";
+ case VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV:
+ return "VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV";
+ case VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT:
+ return "VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT";
case VK_IMAGE_CREATE_DISJOINT_BIT:
return "VK_IMAGE_CREATE_DISJOINT_BIT";
+ case VK_IMAGE_CREATE_EXTENDED_USAGE_BIT:
+ return "VK_IMAGE_CREATE_EXTENDED_USAGE_BIT";
case VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT:
return "VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT";
- case VK_IMAGE_CREATE_SPARSE_BINDING_BIT:
- return "VK_IMAGE_CREATE_SPARSE_BINDING_BIT";
+ case VK_IMAGE_CREATE_PROTECTED_BIT:
+ return "VK_IMAGE_CREATE_PROTECTED_BIT";
case VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT:
return "VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT";
- case VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT:
- return "VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT";
case VK_IMAGE_CREATE_SPARSE_ALIASED_BIT:
return "VK_IMAGE_CREATE_SPARSE_ALIASED_BIT";
+ case VK_IMAGE_CREATE_SPARSE_BINDING_BIT:
+ return "VK_IMAGE_CREATE_SPARSE_BINDING_BIT";
case VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT:
return "VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT";
case VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT:
return "VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT";
+ case VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT:
+ return "VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT";
default:
return "Unhandled VkImageCreateFlagBits";
}
}
+static inline std::string string_VkImageCreateFlags(VkImageCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkImageCreateFlagBits(static_cast<VkImageCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkImageCreateFlagBits(static_cast<VkImageCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkSampleCountFlagBits(VkSampleCountFlagBits input_value)
{
switch ((VkSampleCountFlagBits)input_value)
{
- case VK_SAMPLE_COUNT_32_BIT:
- return "VK_SAMPLE_COUNT_32_BIT";
+ case VK_SAMPLE_COUNT_16_BIT:
+ return "VK_SAMPLE_COUNT_16_BIT";
case VK_SAMPLE_COUNT_1_BIT:
return "VK_SAMPLE_COUNT_1_BIT";
case VK_SAMPLE_COUNT_2_BIT:
return "VK_SAMPLE_COUNT_2_BIT";
- case VK_SAMPLE_COUNT_64_BIT:
- return "VK_SAMPLE_COUNT_64_BIT";
- case VK_SAMPLE_COUNT_16_BIT:
- return "VK_SAMPLE_COUNT_16_BIT";
+ case VK_SAMPLE_COUNT_32_BIT:
+ return "VK_SAMPLE_COUNT_32_BIT";
case VK_SAMPLE_COUNT_4_BIT:
return "VK_SAMPLE_COUNT_4_BIT";
+ case VK_SAMPLE_COUNT_64_BIT:
+ return "VK_SAMPLE_COUNT_64_BIT";
case VK_SAMPLE_COUNT_8_BIT:
return "VK_SAMPLE_COUNT_8_BIT";
default:
@@ -1248,20 +1683,36 @@ static inline const char* string_VkSampleCountFlagBits(VkSampleCountFlagBits inp
}
}
+static inline std::string string_VkSampleCountFlags(VkSampleCountFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSampleCountFlagBits(static_cast<VkSampleCountFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSampleCountFlagBits(static_cast<VkSampleCountFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkPhysicalDeviceType(VkPhysicalDeviceType input_value)
{
switch ((VkPhysicalDeviceType)input_value)
{
- case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
- return "VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU";
- case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
- return "VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU";
- case VK_PHYSICAL_DEVICE_TYPE_OTHER:
- return "VK_PHYSICAL_DEVICE_TYPE_OTHER";
case VK_PHYSICAL_DEVICE_TYPE_CPU:
return "VK_PHYSICAL_DEVICE_TYPE_CPU";
+ case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
+ return "VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU";
case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
return "VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU";
+ case VK_PHYSICAL_DEVICE_TYPE_OTHER:
+ return "VK_PHYSICAL_DEVICE_TYPE_OTHER";
+ case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
+ return "VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU";
default:
return "Unhandled VkPhysicalDeviceType";
}
@@ -1271,14 +1722,14 @@ static inline const char* string_VkQueueFlagBits(VkQueueFlagBits input_value)
{
switch ((VkQueueFlagBits)input_value)
{
- case VK_QUEUE_SPARSE_BINDING_BIT:
- return "VK_QUEUE_SPARSE_BINDING_BIT";
- case VK_QUEUE_GRAPHICS_BIT:
- return "VK_QUEUE_GRAPHICS_BIT";
case VK_QUEUE_COMPUTE_BIT:
return "VK_QUEUE_COMPUTE_BIT";
+ case VK_QUEUE_GRAPHICS_BIT:
+ return "VK_QUEUE_GRAPHICS_BIT";
case VK_QUEUE_PROTECTED_BIT:
return "VK_QUEUE_PROTECTED_BIT";
+ case VK_QUEUE_SPARSE_BINDING_BIT:
+ return "VK_QUEUE_SPARSE_BINDING_BIT";
case VK_QUEUE_TRANSFER_BIT:
return "VK_QUEUE_TRANSFER_BIT";
default:
@@ -1286,20 +1737,40 @@ static inline const char* string_VkQueueFlagBits(VkQueueFlagBits input_value)
}
}
+static inline std::string string_VkQueueFlags(VkQueueFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkQueueFlagBits(static_cast<VkQueueFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkQueueFlagBits(static_cast<VkQueueFlagBits>(0)));
+ return ret;
+}
+
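[editor's note] The same loop is stamped out verbatim for every flags type in the remainder of the file. As an aside (a sketch under the assumption that a generic helper would be acceptable; this is not something the generated header does, and join_flag_names is a hypothetical name), the pattern condenses to one template:

    #include <cstdint>
    #include <string>

    template <typename BitsT>
    std::string join_flag_names(uint32_t mask, const char* (*name_of)(BitsT)) {
        std::string out;
        for (int index = 0; mask; ++index, mask >>= 1) {
            if (mask & 1) {                  // current lowest bit is set
                if (!out.empty()) out += "|";
                out += name_of(static_cast<BitsT>(1u << index));
            }
        }
        // Mirror the generated fallback: stringify 0 when no bit was set.
        return out.empty() ? std::string(name_of(static_cast<BitsT>(0))) : out;
    }

    // e.g. join_flag_names<VkQueueFlagBits>(props.queueFlags, string_VkQueueFlagBits)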
static inline const char* string_VkMemoryPropertyFlagBits(VkMemoryPropertyFlagBits input_value)
{
switch ((VkMemoryPropertyFlagBits)input_value)
{
- case VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT:
- return "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT";
+ case VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD:
+ return "VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD";
case VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT:
return "VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT";
- case VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT:
- return "VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT";
+ case VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD:
+ return "VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD";
case VK_MEMORY_PROPERTY_HOST_CACHED_BIT:
return "VK_MEMORY_PROPERTY_HOST_CACHED_BIT";
case VK_MEMORY_PROPERTY_HOST_COHERENT_BIT:
return "VK_MEMORY_PROPERTY_HOST_COHERENT_BIT";
+ case VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT:
+ return "VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT";
+ case VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT:
+ return "VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT";
case VK_MEMORY_PROPERTY_PROTECTED_BIT:
return "VK_MEMORY_PROPERTY_PROTECTED_BIT";
default:
@@ -1307,6 +1778,22 @@ static inline const char* string_VkMemoryPropertyFlagBits(VkMemoryPropertyFlagBi
}
}
+static inline std::string string_VkMemoryPropertyFlags(VkMemoryPropertyFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkMemoryPropertyFlagBits(static_cast<VkMemoryPropertyFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkMemoryPropertyFlagBits(static_cast<VkMemoryPropertyFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input_value)
{
switch ((VkMemoryHeapFlagBits)input_value)
@@ -1320,6 +1807,22 @@ static inline const char* string_VkMemoryHeapFlagBits(VkMemoryHeapFlagBits input
}
}
+static inline std::string string_VkMemoryHeapFlags(VkMemoryHeapFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkMemoryHeapFlagBits(static_cast<VkMemoryHeapFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkMemoryHeapFlagBits(static_cast<VkMemoryHeapFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkDeviceQueueCreateFlagBits(VkDeviceQueueCreateFlagBits input_value)
{
switch ((VkDeviceQueueCreateFlagBits)input_value)
@@ -1331,89 +1834,177 @@ static inline const char* string_VkDeviceQueueCreateFlagBits(VkDeviceQueueCreate
}
}
+static inline std::string string_VkDeviceQueueCreateFlags(VkDeviceQueueCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDeviceQueueCreateFlagBits(static_cast<VkDeviceQueueCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDeviceQueueCreateFlagBits(static_cast<VkDeviceQueueCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkPipelineStageFlagBits(VkPipelineStageFlagBits input_value)
{
switch ((VkPipelineStageFlagBits)input_value)
{
- case VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:
- return "VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT";
+ case VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV:
+ return "VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV";
+ case VK_PIPELINE_STAGE_ALL_COMMANDS_BIT:
+ return "VK_PIPELINE_STAGE_ALL_COMMANDS_BIT";
case VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT:
return "VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT";
- case VK_PIPELINE_STAGE_TRANSFER_BIT:
- return "VK_PIPELINE_STAGE_TRANSFER_BIT";
- case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:
- return "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT";
+ case VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT:
+ return "VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT";
+ case VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT:
+ return "VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT";
case VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX:
return "VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX";
- case VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT:
- return "VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT";
- case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:
- return "VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT";
+ case VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT:
+ return "VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT";
+ case VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT:
+ return "VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT";
+ case VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:
+ return "VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT";
+ case VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT:
+ return "VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT";
+ case VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT:
+ return "VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT";
case VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT:
return "VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT";
- case VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT:
- return "VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT";
case VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT:
return "VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT";
- case VK_PIPELINE_STAGE_ALL_COMMANDS_BIT:
- return "VK_PIPELINE_STAGE_ALL_COMMANDS_BIT";
- case VK_PIPELINE_STAGE_VERTEX_SHADER_BIT:
- return "VK_PIPELINE_STAGE_VERTEX_SHADER_BIT";
- case VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT:
- return "VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT";
- case VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT:
- return "VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT";
case VK_PIPELINE_STAGE_HOST_BIT:
return "VK_PIPELINE_STAGE_HOST_BIT";
- case VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT:
- return "VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT";
- case VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT:
- return "VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT";
+ case VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT:
+ return "VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT";
+ case VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV:
+ return "VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV:
+ return "VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV:
+ return "VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV";
+ case VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV:
+ return "VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV";
+ case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:
+ return "VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT";
+ case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:
+ return "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT";
+ case VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT:
+ return "VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT";
+ case VK_PIPELINE_STAGE_TRANSFER_BIT:
+ return "VK_PIPELINE_STAGE_TRANSFER_BIT";
+ case VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT:
+ return "VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT";
case VK_PIPELINE_STAGE_VERTEX_INPUT_BIT:
return "VK_PIPELINE_STAGE_VERTEX_INPUT_BIT";
+ case VK_PIPELINE_STAGE_VERTEX_SHADER_BIT:
+ return "VK_PIPELINE_STAGE_VERTEX_SHADER_BIT";
default:
return "Unhandled VkPipelineStageFlagBits";
}
}
+static inline std::string string_VkPipelineStageFlags(VkPipelineStageFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkPipelineStageFlagBits(static_cast<VkPipelineStageFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkImageAspectFlagBits(VkImageAspectFlagBits input_value)
{
switch ((VkImageAspectFlagBits)input_value)
{
+ case VK_IMAGE_ASPECT_COLOR_BIT:
+ return "VK_IMAGE_ASPECT_COLOR_BIT";
+ case VK_IMAGE_ASPECT_DEPTH_BIT:
+ return "VK_IMAGE_ASPECT_DEPTH_BIT";
+ case VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT:
+ return "VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT";
+ case VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT:
+ return "VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT";
+ case VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT:
+ return "VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT";
+ case VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT:
+ return "VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT";
+ case VK_IMAGE_ASPECT_METADATA_BIT:
+ return "VK_IMAGE_ASPECT_METADATA_BIT";
case VK_IMAGE_ASPECT_PLANE_0_BIT:
return "VK_IMAGE_ASPECT_PLANE_0_BIT";
+ case VK_IMAGE_ASPECT_PLANE_1_BIT:
+ return "VK_IMAGE_ASPECT_PLANE_1_BIT";
case VK_IMAGE_ASPECT_PLANE_2_BIT:
return "VK_IMAGE_ASPECT_PLANE_2_BIT";
case VK_IMAGE_ASPECT_STENCIL_BIT:
return "VK_IMAGE_ASPECT_STENCIL_BIT";
- case VK_IMAGE_ASPECT_PLANE_1_BIT:
- return "VK_IMAGE_ASPECT_PLANE_1_BIT";
- case VK_IMAGE_ASPECT_COLOR_BIT:
- return "VK_IMAGE_ASPECT_COLOR_BIT";
- case VK_IMAGE_ASPECT_METADATA_BIT:
- return "VK_IMAGE_ASPECT_METADATA_BIT";
- case VK_IMAGE_ASPECT_DEPTH_BIT:
- return "VK_IMAGE_ASPECT_DEPTH_BIT";
default:
return "Unhandled VkImageAspectFlagBits";
}
}
+static inline std::string string_VkImageAspectFlags(VkImageAspectFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkImageAspectFlagBits(static_cast<VkImageAspectFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkImageAspectFlagBits(static_cast<VkImageAspectFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkSparseImageFormatFlagBits(VkSparseImageFormatFlagBits input_value)
{
switch ((VkSparseImageFormatFlagBits)input_value)
{
case VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT:
return "VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT";
- case VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT:
- return "VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT";
case VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT:
return "VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT";
+ case VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT:
+ return "VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT";
default:
return "Unhandled VkSparseImageFormatFlagBits";
}
}
+static inline std::string string_VkSparseImageFormatFlags(VkSparseImageFormatFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSparseImageFormatFlagBits(static_cast<VkSparseImageFormatFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSparseImageFormatFlagBits(static_cast<VkSparseImageFormatFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkSparseMemoryBindFlagBits(VkSparseMemoryBindFlagBits input_value)
{
switch ((VkSparseMemoryBindFlagBits)input_value)
@@ -1425,6 +2016,22 @@ static inline const char* string_VkSparseMemoryBindFlagBits(VkSparseMemoryBindFl
}
}
+static inline std::string string_VkSparseMemoryBindFlags(VkSparseMemoryBindFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSparseMemoryBindFlagBits(static_cast<VkSparseMemoryBindFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSparseMemoryBindFlagBits(static_cast<VkSparseMemoryBindFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkFenceCreateFlagBits(VkFenceCreateFlagBits input_value)
{
switch ((VkFenceCreateFlagBits)input_value)
@@ -1436,16 +2043,40 @@ static inline const char* string_VkFenceCreateFlagBits(VkFenceCreateFlagBits inp
}
}
+static inline std::string string_VkFenceCreateFlags(VkFenceCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkFenceCreateFlagBits(static_cast<VkFenceCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkFenceCreateFlagBits(static_cast<VkFenceCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkQueryType(VkQueryType input_value)
{
switch ((VkQueryType)input_value)
{
- case VK_QUERY_TYPE_TIMESTAMP:
- return "VK_QUERY_TYPE_TIMESTAMP";
- case VK_QUERY_TYPE_PIPELINE_STATISTICS:
- return "VK_QUERY_TYPE_PIPELINE_STATISTICS";
+ case VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV:
+ return "VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV";
case VK_QUERY_TYPE_OCCLUSION:
return "VK_QUERY_TYPE_OCCLUSION";
+ case VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL:
+ return "VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL";
+ case VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR:
+ return "VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR";
+ case VK_QUERY_TYPE_PIPELINE_STATISTICS:
+ return "VK_QUERY_TYPE_PIPELINE_STATISTICS";
+ case VK_QUERY_TYPE_TIMESTAMP:
+ return "VK_QUERY_TYPE_TIMESTAMP";
+ case VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT:
+ return "VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT";
default:
return "Unhandled VkQueryType";
}
@@ -1455,102 +2086,178 @@ static inline const char* string_VkQueryPipelineStatisticFlagBits(VkQueryPipelin
{
switch ((VkQueryPipelineStatisticFlagBits)input_value)
{
- case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT";
case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT:
return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT";
case VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT:
return "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT";
- case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT:
- return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT:
+ return "VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT:
+ return "VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT:
+ return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT:
+ return "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT";
case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT:
return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT:
+ return "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT:
+ return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT:
+ return "VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT";
+ case VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT:
+ return "VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT";
default:
return "Unhandled VkQueryPipelineStatisticFlagBits";
}
}
+static inline std::string string_VkQueryPipelineStatisticFlags(VkQueryPipelineStatisticFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkQueryPipelineStatisticFlagBits(static_cast<VkQueryPipelineStatisticFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkQueryPipelineStatisticFlagBits(static_cast<VkQueryPipelineStatisticFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkQueryResultFlagBits(VkQueryResultFlagBits input_value)
{
switch ((VkQueryResultFlagBits)input_value)
{
case VK_QUERY_RESULT_64_BIT:
return "VK_QUERY_RESULT_64_BIT";
- case VK_QUERY_RESULT_WITH_AVAILABILITY_BIT:
- return "VK_QUERY_RESULT_WITH_AVAILABILITY_BIT";
- case VK_QUERY_RESULT_WAIT_BIT:
- return "VK_QUERY_RESULT_WAIT_BIT";
case VK_QUERY_RESULT_PARTIAL_BIT:
return "VK_QUERY_RESULT_PARTIAL_BIT";
+ case VK_QUERY_RESULT_WAIT_BIT:
+ return "VK_QUERY_RESULT_WAIT_BIT";
+ case VK_QUERY_RESULT_WITH_AVAILABILITY_BIT:
+ return "VK_QUERY_RESULT_WITH_AVAILABILITY_BIT";
default:
return "Unhandled VkQueryResultFlagBits";
}
}
+static inline std::string string_VkQueryResultFlags(VkQueryResultFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkQueryResultFlagBits(static_cast<VkQueryResultFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkQueryResultFlagBits(static_cast<VkQueryResultFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkBufferCreateFlagBits(VkBufferCreateFlagBits input_value)
{
switch ((VkBufferCreateFlagBits)input_value)
{
- case VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT:
- return "VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT";
- case VK_BUFFER_CREATE_SPARSE_BINDING_BIT:
- return "VK_BUFFER_CREATE_SPARSE_BINDING_BIT";
+ case VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT:
+ return "VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT";
case VK_BUFFER_CREATE_PROTECTED_BIT:
return "VK_BUFFER_CREATE_PROTECTED_BIT";
case VK_BUFFER_CREATE_SPARSE_ALIASED_BIT:
return "VK_BUFFER_CREATE_SPARSE_ALIASED_BIT";
+ case VK_BUFFER_CREATE_SPARSE_BINDING_BIT:
+ return "VK_BUFFER_CREATE_SPARSE_BINDING_BIT";
+ case VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT:
+ return "VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT";
default:
return "Unhandled VkBufferCreateFlagBits";
}
}
+static inline std::string string_VkBufferCreateFlags(VkBufferCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkBufferCreateFlagBits(static_cast<VkBufferCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkBufferCreateFlagBits(static_cast<VkBufferCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkBufferUsageFlagBits(VkBufferUsageFlagBits input_value)
{
switch ((VkBufferUsageFlagBits)input_value)
{
- case VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT:
- return "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
- case VK_BUFFER_USAGE_STORAGE_BUFFER_BIT:
- return "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
- case VK_BUFFER_USAGE_VERTEX_BUFFER_BIT:
- return "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT";
+ case VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT:
+ return "VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT";
+ case VK_BUFFER_USAGE_INDEX_BUFFER_BIT:
+ return "VK_BUFFER_USAGE_INDEX_BUFFER_BIT";
case VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT:
return "VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT";
- case VK_BUFFER_USAGE_TRANSFER_SRC_BIT:
- return "VK_BUFFER_USAGE_TRANSFER_SRC_BIT";
+ case VK_BUFFER_USAGE_RAY_TRACING_BIT_NV:
+ return "VK_BUFFER_USAGE_RAY_TRACING_BIT_NV";
+ case VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT:
+ return "VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT";
+ case VK_BUFFER_USAGE_STORAGE_BUFFER_BIT:
+ return "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
+ case VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT:
+ return "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
case VK_BUFFER_USAGE_TRANSFER_DST_BIT:
return "VK_BUFFER_USAGE_TRANSFER_DST_BIT";
- case VK_BUFFER_USAGE_INDEX_BUFFER_BIT:
- return "VK_BUFFER_USAGE_INDEX_BUFFER_BIT";
+ case VK_BUFFER_USAGE_TRANSFER_SRC_BIT:
+ return "VK_BUFFER_USAGE_TRANSFER_SRC_BIT";
+ case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT:
+ return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT";
+ case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT:
+ return "VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT";
case VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT:
return "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT";
case VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT:
return "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT";
+ case VK_BUFFER_USAGE_VERTEX_BUFFER_BIT:
+ return "VK_BUFFER_USAGE_VERTEX_BUFFER_BIT";
default:
return "Unhandled VkBufferUsageFlagBits";
}
}
+static inline std::string string_VkBufferUsageFlags(VkBufferUsageFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkBufferUsageFlagBits(static_cast<VkBufferUsageFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkBufferUsageFlagBits(static_cast<VkBufferUsageFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkSharingMode(VkSharingMode input_value)
{
switch ((VkSharingMode)input_value)
{
- case VK_SHARING_MODE_EXCLUSIVE:
- return "VK_SHARING_MODE_EXCLUSIVE";
case VK_SHARING_MODE_CONCURRENT:
return "VK_SHARING_MODE_CONCURRENT";
+ case VK_SHARING_MODE_EXCLUSIVE:
+ return "VK_SHARING_MODE_EXCLUSIVE";
default:
return "Unhandled VkSharingMode";
}
@@ -1560,55 +2267,94 @@ static inline const char* string_VkImageLayout(VkImageLayout input_value)
{
switch ((VkImageLayout)input_value)
{
- case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
- return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL";
- case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
- return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL";
+ case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL";
+ case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL";
+ case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL";
+ case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL";
case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL:
return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL";
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL";
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL";
+ case VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT:
+ return "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT";
case VK_IMAGE_LAYOUT_GENERAL:
return "VK_IMAGE_LAYOUT_GENERAL";
- case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL:
- return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL";
- case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
- return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL";
- case VK_IMAGE_LAYOUT_UNDEFINED:
- return "VK_IMAGE_LAYOUT_UNDEFINED";
- case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
- return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL";
case VK_IMAGE_LAYOUT_PREINITIALIZED:
return "VK_IMAGE_LAYOUT_PREINITIALIZED";
case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR";
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
- return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL";
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
- return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL";
+ case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL";
+ case VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV:
+ return "VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV";
case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR:
return "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR";
+ case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL";
+ case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL";
+ case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL";
+ case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
+ return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL";
+ case VK_IMAGE_LAYOUT_UNDEFINED:
+ return "VK_IMAGE_LAYOUT_UNDEFINED";
default:
return "Unhandled VkImageLayout";
}
}
+static inline const char* string_VkImageViewCreateFlagBits(VkImageViewCreateFlagBits input_value)
+{
+ switch ((VkImageViewCreateFlagBits)input_value)
+ {
+ case VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT:
+ return "VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT";
+ default:
+ return "Unhandled VkImageViewCreateFlagBits";
+ }
+}
+
+static inline std::string string_VkImageViewCreateFlags(VkImageViewCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkImageViewCreateFlagBits(static_cast<VkImageViewCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkImageViewCreateFlagBits(static_cast<VkImageViewCreateFlagBits>(0)));
+ return ret;
+}
+
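[editor's note] A side effect worth noting in these decomposers: an all-zero mask skips the loop entirely, so the empty-result fallback stringifies the zero value through the per-bit helper, whose default branch fires. A minimal sketch of that edge case (hypothetical caller code, assuming the header above):

    VkImageViewCreateFlags none = 0;
    // Loop body never runs; the fallback calls the FlagBits stringifier with 0,
    // which hits its default branch:
    std::string s = string_VkImageViewCreateFlags(none);
    // s == "Unhandled VkImageViewCreateFlagBits"

The same applies to bits the generator does not know about: each unknown set bit contributes the "Unhandled ..." string to the joined result.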
static inline const char* string_VkImageViewType(VkImageViewType input_value)
{
switch ((VkImageViewType)input_value)
{
- case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
- return "VK_IMAGE_VIEW_TYPE_2D_ARRAY";
- case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
- return "VK_IMAGE_VIEW_TYPE_1D_ARRAY";
case VK_IMAGE_VIEW_TYPE_1D:
return "VK_IMAGE_VIEW_TYPE_1D";
+ case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+ return "VK_IMAGE_VIEW_TYPE_1D_ARRAY";
+ case VK_IMAGE_VIEW_TYPE_2D:
+ return "VK_IMAGE_VIEW_TYPE_2D";
+ case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+ return "VK_IMAGE_VIEW_TYPE_2D_ARRAY";
case VK_IMAGE_VIEW_TYPE_3D:
return "VK_IMAGE_VIEW_TYPE_3D";
case VK_IMAGE_VIEW_TYPE_CUBE:
return "VK_IMAGE_VIEW_TYPE_CUBE";
case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
return "VK_IMAGE_VIEW_TYPE_CUBE_ARRAY";
- case VK_IMAGE_VIEW_TYPE_2D:
- return "VK_IMAGE_VIEW_TYPE_2D";
default:
return "Unhandled VkImageViewType";
}
@@ -1618,20 +2364,20 @@ static inline const char* string_VkComponentSwizzle(VkComponentSwizzle input_val
{
switch ((VkComponentSwizzle)input_value)
{
+ case VK_COMPONENT_SWIZZLE_A:
+ return "VK_COMPONENT_SWIZZLE_A";
+ case VK_COMPONENT_SWIZZLE_B:
+ return "VK_COMPONENT_SWIZZLE_B";
+ case VK_COMPONENT_SWIZZLE_G:
+ return "VK_COMPONENT_SWIZZLE_G";
+ case VK_COMPONENT_SWIZZLE_IDENTITY:
+ return "VK_COMPONENT_SWIZZLE_IDENTITY";
case VK_COMPONENT_SWIZZLE_ONE:
return "VK_COMPONENT_SWIZZLE_ONE";
case VK_COMPONENT_SWIZZLE_R:
return "VK_COMPONENT_SWIZZLE_R";
case VK_COMPONENT_SWIZZLE_ZERO:
return "VK_COMPONENT_SWIZZLE_ZERO";
- case VK_COMPONENT_SWIZZLE_IDENTITY:
- return "VK_COMPONENT_SWIZZLE_IDENTITY";
- case VK_COMPONENT_SWIZZLE_G:
- return "VK_COMPONENT_SWIZZLE_G";
- case VK_COMPONENT_SWIZZLE_A:
- return "VK_COMPONENT_SWIZZLE_A";
- case VK_COMPONENT_SWIZZLE_B:
- return "VK_COMPONENT_SWIZZLE_B";
default:
return "Unhandled VkComponentSwizzle";
}
@@ -1641,54 +2387,137 @@ static inline const char* string_VkPipelineCreateFlagBits(VkPipelineCreateFlagBi
{
switch ((VkPipelineCreateFlagBits)input_value)
{
+ case VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT:
+ return "VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT";
+ case VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR:
+ return "VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR";
+ case VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR:
+ return "VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR";
+ case VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV:
+ return "VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV";
+ case VK_PIPELINE_CREATE_DERIVATIVE_BIT:
+ return "VK_PIPELINE_CREATE_DERIVATIVE_BIT";
case VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT:
return "VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT";
+ case VK_PIPELINE_CREATE_DISPATCH_BASE_BIT:
+ return "VK_PIPELINE_CREATE_DISPATCH_BASE_BIT";
case VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT:
return "VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT";
- case VK_PIPELINE_CREATE_DISPATCH_BASE:
- return "VK_PIPELINE_CREATE_DISPATCH_BASE";
- case VK_PIPELINE_CREATE_DERIVATIVE_BIT:
- return "VK_PIPELINE_CREATE_DERIVATIVE_BIT";
- case VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT:
- return "VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT";
default:
return "Unhandled VkPipelineCreateFlagBits";
}
}
+static inline std::string string_VkPipelineCreateFlags(VkPipelineCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkPipelineCreateFlagBits(static_cast<VkPipelineCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkPipelineCreateFlagBits(static_cast<VkPipelineCreateFlagBits>(0)));
+ return ret;
+}
+
+static inline const char* string_VkPipelineShaderStageCreateFlagBits(VkPipelineShaderStageCreateFlagBits input_value)
+{
+ switch ((VkPipelineShaderStageCreateFlagBits)input_value)
+ {
+ case VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT:
+ return "VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT";
+ case VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT:
+ return "VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT";
+ default:
+ return "Unhandled VkPipelineShaderStageCreateFlagBits";
+ }
+}
+
+static inline std::string string_VkPipelineShaderStageCreateFlags(VkPipelineShaderStageCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast<VkPipelineShaderStageCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkPipelineShaderStageCreateFlagBits(static_cast<VkPipelineShaderStageCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkShaderStageFlagBits(VkShaderStageFlagBits input_value)
{
switch ((VkShaderStageFlagBits)input_value)
{
- case VK_SHADER_STAGE_VERTEX_BIT:
- return "VK_SHADER_STAGE_VERTEX_BIT";
case VK_SHADER_STAGE_ALL:
return "VK_SHADER_STAGE_ALL";
- case VK_SHADER_STAGE_FRAGMENT_BIT:
- return "VK_SHADER_STAGE_FRAGMENT_BIT";
- case VK_SHADER_STAGE_COMPUTE_BIT:
- return "VK_SHADER_STAGE_COMPUTE_BIT";
case VK_SHADER_STAGE_ALL_GRAPHICS:
return "VK_SHADER_STAGE_ALL_GRAPHICS";
+ case VK_SHADER_STAGE_ANY_HIT_BIT_NV:
+ return "VK_SHADER_STAGE_ANY_HIT_BIT_NV";
+ case VK_SHADER_STAGE_CALLABLE_BIT_NV:
+ return "VK_SHADER_STAGE_CALLABLE_BIT_NV";
+ case VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV:
+ return "VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV";
+ case VK_SHADER_STAGE_COMPUTE_BIT:
+ return "VK_SHADER_STAGE_COMPUTE_BIT";
+ case VK_SHADER_STAGE_FRAGMENT_BIT:
+ return "VK_SHADER_STAGE_FRAGMENT_BIT";
case VK_SHADER_STAGE_GEOMETRY_BIT:
return "VK_SHADER_STAGE_GEOMETRY_BIT";
+ case VK_SHADER_STAGE_INTERSECTION_BIT_NV:
+ return "VK_SHADER_STAGE_INTERSECTION_BIT_NV";
+ case VK_SHADER_STAGE_MESH_BIT_NV:
+ return "VK_SHADER_STAGE_MESH_BIT_NV";
+ case VK_SHADER_STAGE_MISS_BIT_NV:
+ return "VK_SHADER_STAGE_MISS_BIT_NV";
+ case VK_SHADER_STAGE_RAYGEN_BIT_NV:
+ return "VK_SHADER_STAGE_RAYGEN_BIT_NV";
+ case VK_SHADER_STAGE_TASK_BIT_NV:
+ return "VK_SHADER_STAGE_TASK_BIT_NV";
case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
return "VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT";
case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
return "VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT";
+ case VK_SHADER_STAGE_VERTEX_BIT:
+ return "VK_SHADER_STAGE_VERTEX_BIT";
default:
return "Unhandled VkShaderStageFlagBits";
}
}
+static inline std::string string_VkShaderStageFlags(VkShaderStageFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkShaderStageFlagBits(static_cast<VkShaderStageFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkShaderStageFlagBits(static_cast<VkShaderStageFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkVertexInputRate(VkVertexInputRate input_value)
{
switch ((VkVertexInputRate)input_value)
{
- case VK_VERTEX_INPUT_RATE_VERTEX:
- return "VK_VERTEX_INPUT_RATE_VERTEX";
case VK_VERTEX_INPUT_RATE_INSTANCE:
return "VK_VERTEX_INPUT_RATE_INSTANCE";
+ case VK_VERTEX_INPUT_RATE_VERTEX:
+ return "VK_VERTEX_INPUT_RATE_VERTEX";
default:
return "Unhandled VkVertexInputRate";
}
@@ -1698,28 +2527,28 @@ static inline const char* string_VkPrimitiveTopology(VkPrimitiveTopology input_v
{
switch ((VkPrimitiveTopology)input_value)
{
+ case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+ return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST";
+ case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+ return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY";
+ case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+ return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP";
+ case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+ return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY";
case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
return "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST";
- case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
- return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST";
+ case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+ return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST";
case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN";
- case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
- return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY";
+ case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+ return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST";
case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY";
- case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
- return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP";
- case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
- return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST";
- case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
- return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY";
- case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
- return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY";
case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP";
- case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
- return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST";
+ case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+ return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY";
default:
return "Unhandled VkPrimitiveTopology";
}
@@ -1729,14 +2558,14 @@ static inline const char* string_VkPolygonMode(VkPolygonMode input_value)
{
switch ((VkPolygonMode)input_value)
{
- case VK_POLYGON_MODE_POINT:
- return "VK_POLYGON_MODE_POINT";
case VK_POLYGON_MODE_FILL:
return "VK_POLYGON_MODE_FILL";
- case VK_POLYGON_MODE_LINE:
- return "VK_POLYGON_MODE_LINE";
case VK_POLYGON_MODE_FILL_RECTANGLE_NV:
return "VK_POLYGON_MODE_FILL_RECTANGLE_NV";
+ case VK_POLYGON_MODE_LINE:
+ return "VK_POLYGON_MODE_LINE";
+ case VK_POLYGON_MODE_POINT:
+ return "VK_POLYGON_MODE_POINT";
default:
return "Unhandled VkPolygonMode";
}
@@ -1746,12 +2575,12 @@ static inline const char* string_VkCullModeFlagBits(VkCullModeFlagBits input_val
{
switch ((VkCullModeFlagBits)input_value)
{
- case VK_CULL_MODE_FRONT_BIT:
- return "VK_CULL_MODE_FRONT_BIT";
- case VK_CULL_MODE_FRONT_AND_BACK:
- return "VK_CULL_MODE_FRONT_AND_BACK";
case VK_CULL_MODE_BACK_BIT:
return "VK_CULL_MODE_BACK_BIT";
+ case VK_CULL_MODE_FRONT_AND_BACK:
+ return "VK_CULL_MODE_FRONT_AND_BACK";
+ case VK_CULL_MODE_FRONT_BIT:
+ return "VK_CULL_MODE_FRONT_BIT";
case VK_CULL_MODE_NONE:
return "VK_CULL_MODE_NONE";
default:
@@ -1759,6 +2588,22 @@ static inline const char* string_VkCullModeFlagBits(VkCullModeFlagBits input_val
}
}
+static inline std::string string_VkCullModeFlags(VkCullModeFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkCullModeFlagBits(static_cast<VkCullModeFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkCullModeFlagBits(static_cast<VkCullModeFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkFrontFace(VkFrontFace input_value)
{
switch ((VkFrontFace)input_value)
@@ -1778,20 +2623,20 @@ static inline const char* string_VkCompareOp(VkCompareOp input_value)
{
case VK_COMPARE_OP_ALWAYS:
return "VK_COMPARE_OP_ALWAYS";
- case VK_COMPARE_OP_NOT_EQUAL:
- return "VK_COMPARE_OP_NOT_EQUAL";
+ case VK_COMPARE_OP_EQUAL:
+ return "VK_COMPARE_OP_EQUAL";
+ case VK_COMPARE_OP_GREATER:
+ return "VK_COMPARE_OP_GREATER";
+ case VK_COMPARE_OP_GREATER_OR_EQUAL:
+ return "VK_COMPARE_OP_GREATER_OR_EQUAL";
case VK_COMPARE_OP_LESS:
return "VK_COMPARE_OP_LESS";
case VK_COMPARE_OP_LESS_OR_EQUAL:
return "VK_COMPARE_OP_LESS_OR_EQUAL";
case VK_COMPARE_OP_NEVER:
return "VK_COMPARE_OP_NEVER";
- case VK_COMPARE_OP_GREATER:
- return "VK_COMPARE_OP_GREATER";
- case VK_COMPARE_OP_EQUAL:
- return "VK_COMPARE_OP_EQUAL";
- case VK_COMPARE_OP_GREATER_OR_EQUAL:
- return "VK_COMPARE_OP_GREATER_OR_EQUAL";
+ case VK_COMPARE_OP_NOT_EQUAL:
+ return "VK_COMPARE_OP_NOT_EQUAL";
default:
return "Unhandled VkCompareOp";
}
@@ -1801,22 +2646,22 @@ static inline const char* string_VkStencilOp(VkStencilOp input_value)
{
switch ((VkStencilOp)input_value)
{
+ case VK_STENCIL_OP_DECREMENT_AND_CLAMP:
+ return "VK_STENCIL_OP_DECREMENT_AND_CLAMP";
+ case VK_STENCIL_OP_DECREMENT_AND_WRAP:
+ return "VK_STENCIL_OP_DECREMENT_AND_WRAP";
+ case VK_STENCIL_OP_INCREMENT_AND_CLAMP:
+ return "VK_STENCIL_OP_INCREMENT_AND_CLAMP";
+ case VK_STENCIL_OP_INCREMENT_AND_WRAP:
+ return "VK_STENCIL_OP_INCREMENT_AND_WRAP";
case VK_STENCIL_OP_INVERT:
return "VK_STENCIL_OP_INVERT";
case VK_STENCIL_OP_KEEP:
return "VK_STENCIL_OP_KEEP";
- case VK_STENCIL_OP_DECREMENT_AND_CLAMP:
- return "VK_STENCIL_OP_DECREMENT_AND_CLAMP";
case VK_STENCIL_OP_REPLACE:
return "VK_STENCIL_OP_REPLACE";
- case VK_STENCIL_OP_INCREMENT_AND_WRAP:
- return "VK_STENCIL_OP_INCREMENT_AND_WRAP";
case VK_STENCIL_OP_ZERO:
return "VK_STENCIL_OP_ZERO";
- case VK_STENCIL_OP_INCREMENT_AND_CLAMP:
- return "VK_STENCIL_OP_INCREMENT_AND_CLAMP";
- case VK_STENCIL_OP_DECREMENT_AND_WRAP:
- return "VK_STENCIL_OP_DECREMENT_AND_WRAP";
default:
return "Unhandled VkStencilOp";
}
@@ -1826,38 +2671,38 @@ static inline const char* string_VkLogicOp(VkLogicOp input_value)
{
switch ((VkLogicOp)input_value)
{
- case VK_LOGIC_OP_NOR:
- return "VK_LOGIC_OP_NOR";
- case VK_LOGIC_OP_OR:
- return "VK_LOGIC_OP_OR";
- case VK_LOGIC_OP_NO_OP:
- return "VK_LOGIC_OP_NO_OP";
- case VK_LOGIC_OP_NAND:
- return "VK_LOGIC_OP_NAND";
- case VK_LOGIC_OP_XOR:
- return "VK_LOGIC_OP_XOR";
- case VK_LOGIC_OP_AND_REVERSE:
- return "VK_LOGIC_OP_AND_REVERSE";
- case VK_LOGIC_OP_COPY:
- return "VK_LOGIC_OP_COPY";
case VK_LOGIC_OP_AND:
return "VK_LOGIC_OP_AND";
+ case VK_LOGIC_OP_AND_INVERTED:
+ return "VK_LOGIC_OP_AND_INVERTED";
+ case VK_LOGIC_OP_AND_REVERSE:
+ return "VK_LOGIC_OP_AND_REVERSE";
case VK_LOGIC_OP_CLEAR:
return "VK_LOGIC_OP_CLEAR";
+ case VK_LOGIC_OP_COPY:
+ return "VK_LOGIC_OP_COPY";
case VK_LOGIC_OP_COPY_INVERTED:
return "VK_LOGIC_OP_COPY_INVERTED";
- case VK_LOGIC_OP_SET:
- return "VK_LOGIC_OP_SET";
+ case VK_LOGIC_OP_EQUIVALENT:
+ return "VK_LOGIC_OP_EQUIVALENT";
case VK_LOGIC_OP_INVERT:
return "VK_LOGIC_OP_INVERT";
- case VK_LOGIC_OP_AND_INVERTED:
- return "VK_LOGIC_OP_AND_INVERTED";
- case VK_LOGIC_OP_OR_REVERSE:
- return "VK_LOGIC_OP_OR_REVERSE";
+ case VK_LOGIC_OP_NAND:
+ return "VK_LOGIC_OP_NAND";
+ case VK_LOGIC_OP_NOR:
+ return "VK_LOGIC_OP_NOR";
+ case VK_LOGIC_OP_NO_OP:
+ return "VK_LOGIC_OP_NO_OP";
+ case VK_LOGIC_OP_OR:
+ return "VK_LOGIC_OP_OR";
case VK_LOGIC_OP_OR_INVERTED:
return "VK_LOGIC_OP_OR_INVERTED";
- case VK_LOGIC_OP_EQUIVALENT:
- return "VK_LOGIC_OP_EQUIVALENT";
+ case VK_LOGIC_OP_OR_REVERSE:
+ return "VK_LOGIC_OP_OR_REVERSE";
+ case VK_LOGIC_OP_SET:
+ return "VK_LOGIC_OP_SET";
+ case VK_LOGIC_OP_XOR:
+ return "VK_LOGIC_OP_XOR";
default:
return "Unhandled VkLogicOp";
}
@@ -1867,42 +2712,42 @@ static inline const char* string_VkBlendFactor(VkBlendFactor input_value)
{
switch ((VkBlendFactor)input_value)
{
- case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA:
- return "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA";
case VK_BLEND_FACTOR_CONSTANT_ALPHA:
return "VK_BLEND_FACTOR_CONSTANT_ALPHA";
- case VK_BLEND_FACTOR_ONE:
- return "VK_BLEND_FACTOR_ONE";
+ case VK_BLEND_FACTOR_CONSTANT_COLOR:
+ return "VK_BLEND_FACTOR_CONSTANT_COLOR";
+ case VK_BLEND_FACTOR_DST_ALPHA:
+ return "VK_BLEND_FACTOR_DST_ALPHA";
case VK_BLEND_FACTOR_DST_COLOR:
return "VK_BLEND_FACTOR_DST_COLOR";
- case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR:
- return "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR";
- case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR:
- return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR";
+ case VK_BLEND_FACTOR_ONE:
+ return "VK_BLEND_FACTOR_ONE";
case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA:
return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA";
+ case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR:
+ return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR";
case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA:
return "VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA";
- case VK_BLEND_FACTOR_SRC1_COLOR:
- return "VK_BLEND_FACTOR_SRC1_COLOR";
+ case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR:
+ return "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR";
case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA:
return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA";
- case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE:
- return "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE";
- case VK_BLEND_FACTOR_SRC_COLOR:
- return "VK_BLEND_FACTOR_SRC_COLOR";
- case VK_BLEND_FACTOR_DST_ALPHA:
- return "VK_BLEND_FACTOR_DST_ALPHA";
- case VK_BLEND_FACTOR_SRC_ALPHA:
- return "VK_BLEND_FACTOR_SRC_ALPHA";
+ case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR:
+ return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR";
+ case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA:
+ return "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA";
case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR:
return "VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR";
case VK_BLEND_FACTOR_SRC1_ALPHA:
return "VK_BLEND_FACTOR_SRC1_ALPHA";
- case VK_BLEND_FACTOR_CONSTANT_COLOR:
- return "VK_BLEND_FACTOR_CONSTANT_COLOR";
- case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR:
- return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR";
+ case VK_BLEND_FACTOR_SRC1_COLOR:
+ return "VK_BLEND_FACTOR_SRC1_COLOR";
+ case VK_BLEND_FACTOR_SRC_ALPHA:
+ return "VK_BLEND_FACTOR_SRC_ALPHA";
+ case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE:
+ return "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE";
+ case VK_BLEND_FACTOR_SRC_COLOR:
+ return "VK_BLEND_FACTOR_SRC_COLOR";
case VK_BLEND_FACTOR_ZERO:
return "VK_BLEND_FACTOR_ZERO";
default:
@@ -1916,106 +2761,106 @@ static inline const char* string_VkBlendOp(VkBlendOp input_value)
{
case VK_BLEND_OP_ADD:
return "VK_BLEND_OP_ADD";
- case VK_BLEND_OP_SRC_EXT:
- return "VK_BLEND_OP_SRC_EXT";
- case VK_BLEND_OP_DST_EXT:
- return "VK_BLEND_OP_DST_EXT";
+ case VK_BLEND_OP_BLUE_EXT:
+ return "VK_BLEND_OP_BLUE_EXT";
+ case VK_BLEND_OP_COLORBURN_EXT:
+ return "VK_BLEND_OP_COLORBURN_EXT";
+ case VK_BLEND_OP_COLORDODGE_EXT:
+ return "VK_BLEND_OP_COLORDODGE_EXT";
+ case VK_BLEND_OP_CONTRAST_EXT:
+ return "VK_BLEND_OP_CONTRAST_EXT";
+ case VK_BLEND_OP_DARKEN_EXT:
+ return "VK_BLEND_OP_DARKEN_EXT";
case VK_BLEND_OP_DIFFERENCE_EXT:
return "VK_BLEND_OP_DIFFERENCE_EXT";
- case VK_BLEND_OP_MINUS_EXT:
- return "VK_BLEND_OP_MINUS_EXT";
- case VK_BLEND_OP_MINUS_CLAMPED_EXT:
- return "VK_BLEND_OP_MINUS_CLAMPED_EXT";
- case VK_BLEND_OP_SOFTLIGHT_EXT:
- return "VK_BLEND_OP_SOFTLIGHT_EXT";
- case VK_BLEND_OP_LINEARDODGE_EXT:
- return "VK_BLEND_OP_LINEARDODGE_EXT";
- case VK_BLEND_OP_HARDMIX_EXT:
- return "VK_BLEND_OP_HARDMIX_EXT";
- case VK_BLEND_OP_MIN:
- return "VK_BLEND_OP_MIN";
- case VK_BLEND_OP_HSL_LUMINOSITY_EXT:
- return "VK_BLEND_OP_HSL_LUMINOSITY_EXT";
- case VK_BLEND_OP_SRC_ATOP_EXT:
- return "VK_BLEND_OP_SRC_ATOP_EXT";
- case VK_BLEND_OP_SUBTRACT:
- return "VK_BLEND_OP_SUBTRACT";
- case VK_BLEND_OP_HSL_HUE_EXT:
- return "VK_BLEND_OP_HSL_HUE_EXT";
- case VK_BLEND_OP_REVERSE_SUBTRACT:
- return "VK_BLEND_OP_REVERSE_SUBTRACT";
+ case VK_BLEND_OP_DST_ATOP_EXT:
+ return "VK_BLEND_OP_DST_ATOP_EXT";
+ case VK_BLEND_OP_DST_EXT:
+ return "VK_BLEND_OP_DST_EXT";
+ case VK_BLEND_OP_DST_IN_EXT:
+ return "VK_BLEND_OP_DST_IN_EXT";
+ case VK_BLEND_OP_DST_OUT_EXT:
+ return "VK_BLEND_OP_DST_OUT_EXT";
case VK_BLEND_OP_DST_OVER_EXT:
return "VK_BLEND_OP_DST_OVER_EXT";
- case VK_BLEND_OP_VIVIDLIGHT_EXT:
- return "VK_BLEND_OP_VIVIDLIGHT_EXT";
- case VK_BLEND_OP_HSL_COLOR_EXT:
- return "VK_BLEND_OP_HSL_COLOR_EXT";
case VK_BLEND_OP_EXCLUSION_EXT:
return "VK_BLEND_OP_EXCLUSION_EXT";
- case VK_BLEND_OP_PLUS_DARKER_EXT:
- return "VK_BLEND_OP_PLUS_DARKER_EXT";
- case VK_BLEND_OP_DST_IN_EXT:
- return "VK_BLEND_OP_DST_IN_EXT";
- case VK_BLEND_OP_INVERT_OVG_EXT:
- return "VK_BLEND_OP_INVERT_OVG_EXT";
- case VK_BLEND_OP_CONTRAST_EXT:
- return "VK_BLEND_OP_CONTRAST_EXT";
- case VK_BLEND_OP_SRC_OUT_EXT:
- return "VK_BLEND_OP_SRC_OUT_EXT";
- case VK_BLEND_OP_COLORDODGE_EXT:
- return "VK_BLEND_OP_COLORDODGE_EXT";
- case VK_BLEND_OP_SRC_IN_EXT:
- return "VK_BLEND_OP_SRC_IN_EXT";
- case VK_BLEND_OP_MAX:
- return "VK_BLEND_OP_MAX";
- case VK_BLEND_OP_HSL_SATURATION_EXT:
- return "VK_BLEND_OP_HSL_SATURATION_EXT";
- case VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT:
- return "VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT";
- case VK_BLEND_OP_DARKEN_EXT:
- return "VK_BLEND_OP_DARKEN_EXT";
- case VK_BLEND_OP_BLUE_EXT:
- return "VK_BLEND_OP_BLUE_EXT";
- case VK_BLEND_OP_XOR_EXT:
- return "VK_BLEND_OP_XOR_EXT";
+ case VK_BLEND_OP_GREEN_EXT:
+ return "VK_BLEND_OP_GREEN_EXT";
case VK_BLEND_OP_HARDLIGHT_EXT:
return "VK_BLEND_OP_HARDLIGHT_EXT";
- case VK_BLEND_OP_RED_EXT:
- return "VK_BLEND_OP_RED_EXT";
+ case VK_BLEND_OP_HARDMIX_EXT:
+ return "VK_BLEND_OP_HARDMIX_EXT";
+ case VK_BLEND_OP_HSL_COLOR_EXT:
+ return "VK_BLEND_OP_HSL_COLOR_EXT";
+ case VK_BLEND_OP_HSL_HUE_EXT:
+ return "VK_BLEND_OP_HSL_HUE_EXT";
+ case VK_BLEND_OP_HSL_LUMINOSITY_EXT:
+ return "VK_BLEND_OP_HSL_LUMINOSITY_EXT";
+ case VK_BLEND_OP_HSL_SATURATION_EXT:
+ return "VK_BLEND_OP_HSL_SATURATION_EXT";
case VK_BLEND_OP_INVERT_EXT:
return "VK_BLEND_OP_INVERT_EXT";
- case VK_BLEND_OP_ZERO_EXT:
- return "VK_BLEND_OP_ZERO_EXT";
+ case VK_BLEND_OP_INVERT_OVG_EXT:
+ return "VK_BLEND_OP_INVERT_OVG_EXT";
+ case VK_BLEND_OP_INVERT_RGB_EXT:
+ return "VK_BLEND_OP_INVERT_RGB_EXT";
case VK_BLEND_OP_LIGHTEN_EXT:
return "VK_BLEND_OP_LIGHTEN_EXT";
- case VK_BLEND_OP_SCREEN_EXT:
- return "VK_BLEND_OP_SCREEN_EXT";
- case VK_BLEND_OP_DST_OUT_EXT:
- return "VK_BLEND_OP_DST_OUT_EXT";
+ case VK_BLEND_OP_LINEARBURN_EXT:
+ return "VK_BLEND_OP_LINEARBURN_EXT";
+ case VK_BLEND_OP_LINEARDODGE_EXT:
+ return "VK_BLEND_OP_LINEARDODGE_EXT";
+ case VK_BLEND_OP_LINEARLIGHT_EXT:
+ return "VK_BLEND_OP_LINEARLIGHT_EXT";
+ case VK_BLEND_OP_MAX:
+ return "VK_BLEND_OP_MAX";
+ case VK_BLEND_OP_MIN:
+ return "VK_BLEND_OP_MIN";
+ case VK_BLEND_OP_MINUS_CLAMPED_EXT:
+ return "VK_BLEND_OP_MINUS_CLAMPED_EXT";
+ case VK_BLEND_OP_MINUS_EXT:
+ return "VK_BLEND_OP_MINUS_EXT";
case VK_BLEND_OP_MULTIPLY_EXT:
return "VK_BLEND_OP_MULTIPLY_EXT";
case VK_BLEND_OP_OVERLAY_EXT:
return "VK_BLEND_OP_OVERLAY_EXT";
- case VK_BLEND_OP_LINEARLIGHT_EXT:
- return "VK_BLEND_OP_LINEARLIGHT_EXT";
- case VK_BLEND_OP_PLUS_EXT:
- return "VK_BLEND_OP_PLUS_EXT";
- case VK_BLEND_OP_PLUS_CLAMPED_EXT:
- return "VK_BLEND_OP_PLUS_CLAMPED_EXT";
- case VK_BLEND_OP_INVERT_RGB_EXT:
- return "VK_BLEND_OP_INVERT_RGB_EXT";
- case VK_BLEND_OP_DST_ATOP_EXT:
- return "VK_BLEND_OP_DST_ATOP_EXT";
- case VK_BLEND_OP_LINEARBURN_EXT:
- return "VK_BLEND_OP_LINEARBURN_EXT";
- case VK_BLEND_OP_GREEN_EXT:
- return "VK_BLEND_OP_GREEN_EXT";
- case VK_BLEND_OP_COLORBURN_EXT:
- return "VK_BLEND_OP_COLORBURN_EXT";
case VK_BLEND_OP_PINLIGHT_EXT:
return "VK_BLEND_OP_PINLIGHT_EXT";
+ case VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT:
+ return "VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT";
+ case VK_BLEND_OP_PLUS_CLAMPED_EXT:
+ return "VK_BLEND_OP_PLUS_CLAMPED_EXT";
+ case VK_BLEND_OP_PLUS_DARKER_EXT:
+ return "VK_BLEND_OP_PLUS_DARKER_EXT";
+ case VK_BLEND_OP_PLUS_EXT:
+ return "VK_BLEND_OP_PLUS_EXT";
+ case VK_BLEND_OP_RED_EXT:
+ return "VK_BLEND_OP_RED_EXT";
+ case VK_BLEND_OP_REVERSE_SUBTRACT:
+ return "VK_BLEND_OP_REVERSE_SUBTRACT";
+ case VK_BLEND_OP_SCREEN_EXT:
+ return "VK_BLEND_OP_SCREEN_EXT";
+ case VK_BLEND_OP_SOFTLIGHT_EXT:
+ return "VK_BLEND_OP_SOFTLIGHT_EXT";
+ case VK_BLEND_OP_SRC_ATOP_EXT:
+ return "VK_BLEND_OP_SRC_ATOP_EXT";
+ case VK_BLEND_OP_SRC_EXT:
+ return "VK_BLEND_OP_SRC_EXT";
+ case VK_BLEND_OP_SRC_IN_EXT:
+ return "VK_BLEND_OP_SRC_IN_EXT";
+ case VK_BLEND_OP_SRC_OUT_EXT:
+ return "VK_BLEND_OP_SRC_OUT_EXT";
case VK_BLEND_OP_SRC_OVER_EXT:
return "VK_BLEND_OP_SRC_OVER_EXT";
+ case VK_BLEND_OP_SUBTRACT:
+ return "VK_BLEND_OP_SUBTRACT";
+ case VK_BLEND_OP_VIVIDLIGHT_EXT:
+ return "VK_BLEND_OP_VIVIDLIGHT_EXT";
+ case VK_BLEND_OP_XOR_EXT:
+ return "VK_BLEND_OP_XOR_EXT";
+ case VK_BLEND_OP_ZERO_EXT:
+ return "VK_BLEND_OP_ZERO_EXT";
default:
return "Unhandled VkBlendOp";
}
@@ -2025,60 +2870,113 @@ static inline const char* string_VkColorComponentFlagBits(VkColorComponentFlagBi
{
switch ((VkColorComponentFlagBits)input_value)
{
- case VK_COLOR_COMPONENT_R_BIT:
- return "VK_COLOR_COMPONENT_R_BIT";
+ case VK_COLOR_COMPONENT_A_BIT:
+ return "VK_COLOR_COMPONENT_A_BIT";
case VK_COLOR_COMPONENT_B_BIT:
return "VK_COLOR_COMPONENT_B_BIT";
case VK_COLOR_COMPONENT_G_BIT:
return "VK_COLOR_COMPONENT_G_BIT";
- case VK_COLOR_COMPONENT_A_BIT:
- return "VK_COLOR_COMPONENT_A_BIT";
+ case VK_COLOR_COMPONENT_R_BIT:
+ return "VK_COLOR_COMPONENT_R_BIT";
default:
return "Unhandled VkColorComponentFlagBits";
}
}
+static inline std::string string_VkColorComponentFlags(VkColorComponentFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkColorComponentFlagBits(static_cast<VkColorComponentFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkColorComponentFlagBits(static_cast<VkColorComponentFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkDynamicState(VkDynamicState input_value)
{
switch ((VkDynamicState)input_value)
{
- case VK_DYNAMIC_STATE_LINE_WIDTH:
- return "VK_DYNAMIC_STATE_LINE_WIDTH";
+ case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
+ return "VK_DYNAMIC_STATE_BLEND_CONSTANTS";
case VK_DYNAMIC_STATE_DEPTH_BIAS:
return "VK_DYNAMIC_STATE_DEPTH_BIAS";
+ case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
+ return "VK_DYNAMIC_STATE_DEPTH_BOUNDS";
+ case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
+ return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT";
+ case VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV:
+ return "VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV";
+ case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
+ return "VK_DYNAMIC_STATE_LINE_STIPPLE_EXT";
+ case VK_DYNAMIC_STATE_LINE_WIDTH:
+ return "VK_DYNAMIC_STATE_LINE_WIDTH";
+ case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
+ return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT";
+ case VK_DYNAMIC_STATE_SCISSOR:
+ return "VK_DYNAMIC_STATE_SCISSOR";
case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
return "VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK";
case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
return "VK_DYNAMIC_STATE_STENCIL_REFERENCE";
- case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
- return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV";
case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
return "VK_DYNAMIC_STATE_STENCIL_WRITE_MASK";
- case VK_DYNAMIC_STATE_SCISSOR:
- return "VK_DYNAMIC_STATE_SCISSOR";
case VK_DYNAMIC_STATE_VIEWPORT:
return "VK_DYNAMIC_STATE_VIEWPORT";
- case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
- return "VK_DYNAMIC_STATE_DEPTH_BOUNDS";
- case VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT:
- return "VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT";
- case VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT:
- return "VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT";
- case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
- return "VK_DYNAMIC_STATE_BLEND_CONSTANTS";
+ case VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV:
+ return "VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV";
+ case VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV:
+ return "VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV";
+ case VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV:
+ return "VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV";
default:
return "Unhandled VkDynamicState";
}
}
+static inline const char* string_VkSamplerCreateFlagBits(VkSamplerCreateFlagBits input_value)
+{
+ switch ((VkSamplerCreateFlagBits)input_value)
+ {
+ case VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT:
+ return "VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT";
+ case VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT:
+ return "VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT";
+ default:
+ return "Unhandled VkSamplerCreateFlagBits";
+ }
+}
+
+static inline std::string string_VkSamplerCreateFlags(VkSamplerCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSamplerCreateFlagBits(static_cast<VkSamplerCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSamplerCreateFlagBits(static_cast<VkSamplerCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkFilter(VkFilter input_value)
{
switch ((VkFilter)input_value)
{
- case VK_FILTER_LINEAR:
- return "VK_FILTER_LINEAR";
case VK_FILTER_CUBIC_IMG:
return "VK_FILTER_CUBIC_IMG";
+ case VK_FILTER_LINEAR:
+ return "VK_FILTER_LINEAR";
case VK_FILTER_NEAREST:
return "VK_FILTER_NEAREST";
default:
@@ -2090,10 +2988,10 @@ static inline const char* string_VkSamplerMipmapMode(VkSamplerMipmapMode input_v
{
switch ((VkSamplerMipmapMode)input_value)
{
- case VK_SAMPLER_MIPMAP_MODE_NEAREST:
- return "VK_SAMPLER_MIPMAP_MODE_NEAREST";
case VK_SAMPLER_MIPMAP_MODE_LINEAR:
return "VK_SAMPLER_MIPMAP_MODE_LINEAR";
+ case VK_SAMPLER_MIPMAP_MODE_NEAREST:
+ return "VK_SAMPLER_MIPMAP_MODE_NEAREST";
default:
return "Unhandled VkSamplerMipmapMode";
}
@@ -2103,14 +3001,14 @@ static inline const char* string_VkSamplerAddressMode(VkSamplerAddressMode input
{
switch ((VkSamplerAddressMode)input_value)
{
- case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE:
- return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE";
case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER:
return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER";
- case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT:
- return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT";
case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE:
return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE";
+ case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT:
+ return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT";
+ case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE:
+ return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE";
case VK_SAMPLER_ADDRESS_MODE_REPEAT:
return "VK_SAMPLER_ADDRESS_MODE_REPEAT";
default:
@@ -2122,18 +3020,18 @@ static inline const char* string_VkBorderColor(VkBorderColor input_value)
{
switch ((VkBorderColor)input_value)
{
+ case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
+ return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK";
+ case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
+ return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE";
case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
return "VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK";
case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
return "VK_BORDER_COLOR_INT_OPAQUE_BLACK";
- case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
- return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK";
case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
return "VK_BORDER_COLOR_INT_OPAQUE_WHITE";
- case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
- return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE";
- case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
- return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK";
+ case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
+ return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK";
default:
return "Unhandled VkBorderColor";
}
@@ -2143,41 +3041,61 @@ static inline const char* string_VkDescriptorSetLayoutCreateFlagBits(VkDescripto
{
switch ((VkDescriptorSetLayoutCreateFlagBits)input_value)
{
- case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT:
- return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT";
case VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR:
return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR";
+ case VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT:
+ return "VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT";
default:
return "Unhandled VkDescriptorSetLayoutCreateFlagBits";
}
}
+static inline std::string string_VkDescriptorSetLayoutCreateFlags(VkDescriptorSetLayoutCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast<VkDescriptorSetLayoutCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDescriptorSetLayoutCreateFlagBits(static_cast<VkDescriptorSetLayoutCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkDescriptorType(VkDescriptorType input_value)
{
switch ((VkDescriptorType)input_value)
{
+ case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV:
+ return "VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV";
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER";
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER";
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC";
+ case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+ return "VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT";
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT";
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER";
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- return "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER";
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE";
case VK_DESCRIPTOR_TYPE_SAMPLER:
return "VK_DESCRIPTOR_TYPE_SAMPLER";
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE";
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC";
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER";
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+ return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC";
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+ return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE";
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ return "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER";
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER";
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC";
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER";
default:
return "Unhandled VkDescriptorType";
}
@@ -2187,15 +3105,58 @@ static inline const char* string_VkDescriptorPoolCreateFlagBits(VkDescriptorPool
{
switch ((VkDescriptorPoolCreateFlagBits)input_value)
{
- case VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT:
- return "VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT";
case VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT:
return "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT";
+ case VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT:
+ return "VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT";
default:
return "Unhandled VkDescriptorPoolCreateFlagBits";
}
}
+static inline std::string string_VkDescriptorPoolCreateFlags(VkDescriptorPoolCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDescriptorPoolCreateFlagBits(static_cast<VkDescriptorPoolCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDescriptorPoolCreateFlagBits(static_cast<VkDescriptorPoolCreateFlagBits>(0)));
+ return ret;
+}
+
+static inline const char* string_VkFramebufferCreateFlagBits(VkFramebufferCreateFlagBits input_value)
+{
+ switch ((VkFramebufferCreateFlagBits)input_value)
+ {
+ case VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT:
+ return "VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT";
+ default:
+ return "Unhandled VkFramebufferCreateFlagBits";
+ }
+}
+
+static inline std::string string_VkFramebufferCreateFlags(VkFramebufferCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkFramebufferCreateFlagBits(static_cast<VkFramebufferCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkFramebufferCreateFlagBits(static_cast<VkFramebufferCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkAttachmentDescriptionFlagBits(VkAttachmentDescriptionFlagBits input_value)
{
switch ((VkAttachmentDescriptionFlagBits)input_value)
@@ -2207,14 +3168,30 @@ static inline const char* string_VkAttachmentDescriptionFlagBits(VkAttachmentDes
}
}
+static inline std::string string_VkAttachmentDescriptionFlags(VkAttachmentDescriptionFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkAttachmentDescriptionFlagBits(static_cast<VkAttachmentDescriptionFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkAttachmentDescriptionFlagBits(static_cast<VkAttachmentDescriptionFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkAttachmentLoadOp(VkAttachmentLoadOp input_value)
{
switch ((VkAttachmentLoadOp)input_value)
{
- case VK_ATTACHMENT_LOAD_OP_DONT_CARE:
- return "VK_ATTACHMENT_LOAD_OP_DONT_CARE";
case VK_ATTACHMENT_LOAD_OP_CLEAR:
return "VK_ATTACHMENT_LOAD_OP_CLEAR";
+ case VK_ATTACHMENT_LOAD_OP_DONT_CARE:
+ return "VK_ATTACHMENT_LOAD_OP_DONT_CARE";
case VK_ATTACHMENT_LOAD_OP_LOAD:
return "VK_ATTACHMENT_LOAD_OP_LOAD";
default:
@@ -2248,6 +3225,22 @@ static inline const char* string_VkSubpassDescriptionFlagBits(VkSubpassDescripti
}
}
+static inline std::string string_VkSubpassDescriptionFlags(VkSubpassDescriptionFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSubpassDescriptionFlagBits(static_cast<VkSubpassDescriptionFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSubpassDescriptionFlagBits(static_cast<VkSubpassDescriptionFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkPipelineBindPoint(VkPipelineBindPoint input_value)
{
switch ((VkPipelineBindPoint)input_value)
@@ -2256,6 +3249,8 @@ static inline const char* string_VkPipelineBindPoint(VkPipelineBindPoint input_v
return "VK_PIPELINE_BIND_POINT_COMPUTE";
case VK_PIPELINE_BIND_POINT_GRAPHICS:
return "VK_PIPELINE_BIND_POINT_GRAPHICS";
+ case VK_PIPELINE_BIND_POINT_RAY_TRACING_NV:
+ return "VK_PIPELINE_BIND_POINT_RAY_TRACING_NV";
default:
return "Unhandled VkPipelineBindPoint";
}
@@ -2265,59 +3260,91 @@ static inline const char* string_VkAccessFlagBits(VkAccessFlagBits input_value)
{
switch ((VkAccessFlagBits)input_value)
{
- case VK_ACCESS_UNIFORM_READ_BIT:
- return "VK_ACCESS_UNIFORM_READ_BIT";
- case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT:
- return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT";
- case VK_ACCESS_INDIRECT_COMMAND_READ_BIT:
- return "VK_ACCESS_INDIRECT_COMMAND_READ_BIT";
- case VK_ACCESS_HOST_READ_BIT:
- return "VK_ACCESS_HOST_READ_BIT";
- case VK_ACCESS_HOST_WRITE_BIT:
- return "VK_ACCESS_HOST_WRITE_BIT";
- case VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT:
- return "VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT";
+ case VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV:
+ return "VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV";
+ case VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV:
+ return "VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV";
case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT:
return "VK_ACCESS_COLOR_ATTACHMENT_READ_BIT";
- case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:
- return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT";
+ case VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT:
+ return "VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT";
case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT:
return "VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT";
- case VK_ACCESS_TRANSFER_WRITE_BIT:
- return "VK_ACCESS_TRANSFER_WRITE_BIT";
+ case VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX:
+ return "VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX";
case VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX:
return "VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX";
+ case VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT:
+ return "VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT";
+ case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT:
+ return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT";
+ case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:
+ return "VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT";
+ case VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT:
+ return "VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT";
+ case VK_ACCESS_HOST_READ_BIT:
+ return "VK_ACCESS_HOST_READ_BIT";
+ case VK_ACCESS_HOST_WRITE_BIT:
+ return "VK_ACCESS_HOST_WRITE_BIT";
+ case VK_ACCESS_INDEX_READ_BIT:
+ return "VK_ACCESS_INDEX_READ_BIT";
+ case VK_ACCESS_INDIRECT_COMMAND_READ_BIT:
+ return "VK_ACCESS_INDIRECT_COMMAND_READ_BIT";
case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT:
return "VK_ACCESS_INPUT_ATTACHMENT_READ_BIT";
- case VK_ACCESS_SHADER_READ_BIT:
- return "VK_ACCESS_SHADER_READ_BIT";
- case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:
- return "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT";
- case VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX:
- return "VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX";
case VK_ACCESS_MEMORY_READ_BIT:
return "VK_ACCESS_MEMORY_READ_BIT";
- case VK_ACCESS_SHADER_WRITE_BIT:
- return "VK_ACCESS_SHADER_WRITE_BIT";
- case VK_ACCESS_INDEX_READ_BIT:
- return "VK_ACCESS_INDEX_READ_BIT";
case VK_ACCESS_MEMORY_WRITE_BIT:
return "VK_ACCESS_MEMORY_WRITE_BIT";
+ case VK_ACCESS_SHADER_READ_BIT:
+ return "VK_ACCESS_SHADER_READ_BIT";
+ case VK_ACCESS_SHADER_WRITE_BIT:
+ return "VK_ACCESS_SHADER_WRITE_BIT";
+ case VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV:
+ return "VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV";
case VK_ACCESS_TRANSFER_READ_BIT:
return "VK_ACCESS_TRANSFER_READ_BIT";
+ case VK_ACCESS_TRANSFER_WRITE_BIT:
+ return "VK_ACCESS_TRANSFER_WRITE_BIT";
+ case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT:
+ return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT";
+ case VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT:
+ return "VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT";
+ case VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT:
+ return "VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT";
+ case VK_ACCESS_UNIFORM_READ_BIT:
+ return "VK_ACCESS_UNIFORM_READ_BIT";
+ case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:
+ return "VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT";
default:
return "Unhandled VkAccessFlagBits";
}
}
+static inline std::string string_VkAccessFlags(VkAccessFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkAccessFlagBits(static_cast<VkAccessFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkDependencyFlagBits(VkDependencyFlagBits input_value)
{
switch ((VkDependencyFlagBits)input_value)
{
- case VK_DEPENDENCY_DEVICE_GROUP_BIT:
- return "VK_DEPENDENCY_DEVICE_GROUP_BIT";
case VK_DEPENDENCY_BY_REGION_BIT:
return "VK_DEPENDENCY_BY_REGION_BIT";
+ case VK_DEPENDENCY_DEVICE_GROUP_BIT:
+ return "VK_DEPENDENCY_DEVICE_GROUP_BIT";
case VK_DEPENDENCY_VIEW_LOCAL_BIT:
return "VK_DEPENDENCY_VIEW_LOCAL_BIT";
default:
@@ -2325,21 +3352,53 @@ static inline const char* string_VkDependencyFlagBits(VkDependencyFlagBits input
}
}
+static inline std::string string_VkDependencyFlags(VkDependencyFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDependencyFlagBits(static_cast<VkDependencyFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDependencyFlagBits(static_cast<VkDependencyFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkCommandPoolCreateFlagBits(VkCommandPoolCreateFlagBits input_value)
{
switch ((VkCommandPoolCreateFlagBits)input_value)
{
- case VK_COMMAND_POOL_CREATE_TRANSIENT_BIT:
- return "VK_COMMAND_POOL_CREATE_TRANSIENT_BIT";
- case VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT:
- return "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT";
case VK_COMMAND_POOL_CREATE_PROTECTED_BIT:
return "VK_COMMAND_POOL_CREATE_PROTECTED_BIT";
+ case VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT:
+ return "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT";
+ case VK_COMMAND_POOL_CREATE_TRANSIENT_BIT:
+ return "VK_COMMAND_POOL_CREATE_TRANSIENT_BIT";
default:
return "Unhandled VkCommandPoolCreateFlagBits";
}
}
+static inline std::string string_VkCommandPoolCreateFlags(VkCommandPoolCreateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkCommandPoolCreateFlagBits(static_cast<VkCommandPoolCreateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkCommandPoolCreateFlagBits(static_cast<VkCommandPoolCreateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkCommandPoolResetFlagBits(VkCommandPoolResetFlagBits input_value)
{
switch ((VkCommandPoolResetFlagBits)input_value)
@@ -2351,14 +3410,30 @@ static inline const char* string_VkCommandPoolResetFlagBits(VkCommandPoolResetFl
}
}
+static inline std::string string_VkCommandPoolResetFlags(VkCommandPoolResetFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkCommandPoolResetFlagBits(static_cast<VkCommandPoolResetFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkCommandPoolResetFlagBits(static_cast<VkCommandPoolResetFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkCommandBufferLevel(VkCommandBufferLevel input_value)
{
switch ((VkCommandBufferLevel)input_value)
{
- case VK_COMMAND_BUFFER_LEVEL_SECONDARY:
- return "VK_COMMAND_BUFFER_LEVEL_SECONDARY";
case VK_COMMAND_BUFFER_LEVEL_PRIMARY:
return "VK_COMMAND_BUFFER_LEVEL_PRIMARY";
+ case VK_COMMAND_BUFFER_LEVEL_SECONDARY:
+ return "VK_COMMAND_BUFFER_LEVEL_SECONDARY";
default:
return "Unhandled VkCommandBufferLevel";
}
@@ -2368,17 +3443,33 @@ static inline const char* string_VkCommandBufferUsageFlagBits(VkCommandBufferUsa
{
switch ((VkCommandBufferUsageFlagBits)input_value)
{
- case VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT:
- return "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT";
- case VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT:
- return "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT";
case VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT:
return "VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT";
+ case VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT:
+ return "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT";
+ case VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT:
+ return "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT";
default:
return "Unhandled VkCommandBufferUsageFlagBits";
}
}
+static inline std::string string_VkCommandBufferUsageFlags(VkCommandBufferUsageFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkCommandBufferUsageFlagBits(static_cast<VkCommandBufferUsageFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkCommandBufferUsageFlagBits(static_cast<VkCommandBufferUsageFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkQueryControlFlagBits(VkQueryControlFlagBits input_value)
{
switch ((VkQueryControlFlagBits)input_value)
@@ -2390,6 +3481,22 @@ static inline const char* string_VkQueryControlFlagBits(VkQueryControlFlagBits i
}
}
+static inline std::string string_VkQueryControlFlags(VkQueryControlFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkQueryControlFlagBits(static_cast<VkQueryControlFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkQueryControlFlagBits(static_cast<VkQueryControlFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkCommandBufferResetFlagBits(VkCommandBufferResetFlagBits input_value)
{
switch ((VkCommandBufferResetFlagBits)input_value)
@@ -2401,14 +3508,30 @@ static inline const char* string_VkCommandBufferResetFlagBits(VkCommandBufferRes
}
}
+static inline std::string string_VkCommandBufferResetFlags(VkCommandBufferResetFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkCommandBufferResetFlagBits(static_cast<VkCommandBufferResetFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkCommandBufferResetFlagBits(static_cast<VkCommandBufferResetFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkStencilFaceFlagBits(VkStencilFaceFlagBits input_value)
{
switch ((VkStencilFaceFlagBits)input_value)
{
case VK_STENCIL_FACE_BACK_BIT:
return "VK_STENCIL_FACE_BACK_BIT";
- case VK_STENCIL_FRONT_AND_BACK:
- return "VK_STENCIL_FRONT_AND_BACK";
+ case VK_STENCIL_FACE_FRONT_AND_BACK:
+ return "VK_STENCIL_FACE_FRONT_AND_BACK";
case VK_STENCIL_FACE_FRONT_BIT:
return "VK_STENCIL_FACE_FRONT_BIT";
default:
@@ -2416,14 +3539,34 @@ static inline const char* string_VkStencilFaceFlagBits(VkStencilFaceFlagBits inp
}
}
+static inline std::string string_VkStencilFaceFlags(VkStencilFaceFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkStencilFaceFlagBits(static_cast<VkStencilFaceFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkStencilFaceFlagBits(static_cast<VkStencilFaceFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkIndexType(VkIndexType input_value)
{
switch ((VkIndexType)input_value)
{
+ case VK_INDEX_TYPE_NONE_NV:
+ return "VK_INDEX_TYPE_NONE_NV";
case VK_INDEX_TYPE_UINT16:
return "VK_INDEX_TYPE_UINT16";
case VK_INDEX_TYPE_UINT32:
return "VK_INDEX_TYPE_UINT32";
+ case VK_INDEX_TYPE_UINT8_EXT:
+ return "VK_INDEX_TYPE_UINT8_EXT";
default:
return "Unhandled VkIndexType";
}
@@ -2433,10 +3576,10 @@ static inline const char* string_VkSubpassContents(VkSubpassContents input_value
{
switch ((VkSubpassContents)input_value)
{
- case VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS:
- return "VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS";
case VK_SUBPASS_CONTENTS_INLINE:
return "VK_SUBPASS_CONTENTS_INLINE";
+ case VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS:
+ return "VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS";
default:
return "Unhandled VkSubpassContents";
}
@@ -2446,78 +3589,82 @@ static inline const char* string_VkObjectType(VkObjectType input_value)
{
switch ((VkObjectType)input_value)
{
- case VK_OBJECT_TYPE_SEMAPHORE:
- return "VK_OBJECT_TYPE_SEMAPHORE";
- case VK_OBJECT_TYPE_PIPELINE:
- return "VK_OBJECT_TYPE_PIPELINE";
- case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
- return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT";
- case VK_OBJECT_TYPE_SURFACE_KHR:
- return "VK_OBJECT_TYPE_SURFACE_KHR";
+ case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
+ return "VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV";
case VK_OBJECT_TYPE_BUFFER:
return "VK_OBJECT_TYPE_BUFFER";
- case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
- return "VK_OBJECT_TYPE_PHYSICAL_DEVICE";
- case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
- return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION";
- case VK_OBJECT_TYPE_QUEUE:
- return "VK_OBJECT_TYPE_QUEUE";
- case VK_OBJECT_TYPE_DEVICE:
- return "VK_OBJECT_TYPE_DEVICE";
+ case VK_OBJECT_TYPE_BUFFER_VIEW:
+ return "VK_OBJECT_TYPE_BUFFER_VIEW";
case VK_OBJECT_TYPE_COMMAND_BUFFER:
return "VK_OBJECT_TYPE_COMMAND_BUFFER";
- case VK_OBJECT_TYPE_DESCRIPTOR_SET:
- return "VK_OBJECT_TYPE_DESCRIPTOR_SET";
- case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
- return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT";
case VK_OBJECT_TYPE_COMMAND_POOL:
return "VK_OBJECT_TYPE_COMMAND_POOL";
- case VK_OBJECT_TYPE_UNKNOWN:
- return "VK_OBJECT_TYPE_UNKNOWN";
+ case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
+ return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT";
+ case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
+ return "VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT";
case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
return "VK_OBJECT_TYPE_DESCRIPTOR_POOL";
+ case VK_OBJECT_TYPE_DESCRIPTOR_SET:
+ return "VK_OBJECT_TYPE_DESCRIPTOR_SET";
+ case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
+ return "VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT";
case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
return "VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE";
- case VK_OBJECT_TYPE_BUFFER_VIEW:
- return "VK_OBJECT_TYPE_BUFFER_VIEW";
+ case VK_OBJECT_TYPE_DEVICE:
+ return "VK_OBJECT_TYPE_DEVICE";
case VK_OBJECT_TYPE_DEVICE_MEMORY:
return "VK_OBJECT_TYPE_DEVICE_MEMORY";
- case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
- return "VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT";
- case VK_OBJECT_TYPE_IMAGE:
- return "VK_OBJECT_TYPE_IMAGE";
- case VK_OBJECT_TYPE_INSTANCE:
- return "VK_OBJECT_TYPE_INSTANCE";
+ case VK_OBJECT_TYPE_DISPLAY_KHR:
+ return "VK_OBJECT_TYPE_DISPLAY_KHR";
case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
return "VK_OBJECT_TYPE_DISPLAY_MODE_KHR";
+ case VK_OBJECT_TYPE_EVENT:
+ return "VK_OBJECT_TYPE_EVENT";
+ case VK_OBJECT_TYPE_FENCE:
+ return "VK_OBJECT_TYPE_FENCE";
+ case VK_OBJECT_TYPE_FRAMEBUFFER:
+ return "VK_OBJECT_TYPE_FRAMEBUFFER";
+ case VK_OBJECT_TYPE_IMAGE:
+ return "VK_OBJECT_TYPE_IMAGE";
case VK_OBJECT_TYPE_IMAGE_VIEW:
return "VK_OBJECT_TYPE_IMAGE_VIEW";
+ case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX:
+ return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX";
+ case VK_OBJECT_TYPE_INSTANCE:
+ return "VK_OBJECT_TYPE_INSTANCE";
+ case VK_OBJECT_TYPE_OBJECT_TABLE_NVX:
+ return "VK_OBJECT_TYPE_OBJECT_TABLE_NVX";
+ case VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL:
+ return "VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL";
+ case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
+ return "VK_OBJECT_TYPE_PHYSICAL_DEVICE";
+ case VK_OBJECT_TYPE_PIPELINE:
+ return "VK_OBJECT_TYPE_PIPELINE";
+ case VK_OBJECT_TYPE_PIPELINE_CACHE:
+ return "VK_OBJECT_TYPE_PIPELINE_CACHE";
case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
return "VK_OBJECT_TYPE_PIPELINE_LAYOUT";
- case VK_OBJECT_TYPE_EVENT:
- return "VK_OBJECT_TYPE_EVENT";
+ case VK_OBJECT_TYPE_QUERY_POOL:
+ return "VK_OBJECT_TYPE_QUERY_POOL";
+ case VK_OBJECT_TYPE_QUEUE:
+ return "VK_OBJECT_TYPE_QUEUE";
case VK_OBJECT_TYPE_RENDER_PASS:
return "VK_OBJECT_TYPE_RENDER_PASS";
- case VK_OBJECT_TYPE_FRAMEBUFFER:
- return "VK_OBJECT_TYPE_FRAMEBUFFER";
case VK_OBJECT_TYPE_SAMPLER:
return "VK_OBJECT_TYPE_SAMPLER";
- case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
- return "VK_OBJECT_TYPE_SWAPCHAIN_KHR";
- case VK_OBJECT_TYPE_QUERY_POOL:
- return "VK_OBJECT_TYPE_QUERY_POOL";
- case VK_OBJECT_TYPE_DISPLAY_KHR:
- return "VK_OBJECT_TYPE_DISPLAY_KHR";
+ case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
+ return "VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION";
+ case VK_OBJECT_TYPE_SEMAPHORE:
+ return "VK_OBJECT_TYPE_SEMAPHORE";
case VK_OBJECT_TYPE_SHADER_MODULE:
return "VK_OBJECT_TYPE_SHADER_MODULE";
- case VK_OBJECT_TYPE_PIPELINE_CACHE:
- return "VK_OBJECT_TYPE_PIPELINE_CACHE";
- case VK_OBJECT_TYPE_FENCE:
- return "VK_OBJECT_TYPE_FENCE";
- case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX:
- return "VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX";
- case VK_OBJECT_TYPE_OBJECT_TABLE_NVX:
- return "VK_OBJECT_TYPE_OBJECT_TABLE_NVX";
+ case VK_OBJECT_TYPE_SURFACE_KHR:
+ return "VK_OBJECT_TYPE_SURFACE_KHR";
+ case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
+ return "VK_OBJECT_TYPE_SWAPCHAIN_KHR";
+ case VK_OBJECT_TYPE_UNKNOWN:
+ return "VK_OBJECT_TYPE_UNKNOWN";
case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
return "VK_OBJECT_TYPE_VALIDATION_CACHE_EXT";
default:
@@ -2525,54 +3672,105 @@ static inline const char* string_VkObjectType(VkObjectType input_value)
}
}
+static inline const char* string_VkVendorId(VkVendorId input_value)
+{
+ switch ((VkVendorId)input_value)
+ {
+ case VK_VENDOR_ID_KAZAN:
+ return "VK_VENDOR_ID_KAZAN";
+ case VK_VENDOR_ID_VIV:
+ return "VK_VENDOR_ID_VIV";
+ case VK_VENDOR_ID_VSI:
+ return "VK_VENDOR_ID_VSI";
+ default:
+ return "Unhandled VkVendorId";
+ }
+}
+
static inline const char* string_VkSubgroupFeatureFlagBits(VkSubgroupFeatureFlagBits input_value)
{
switch ((VkSubgroupFeatureFlagBits)input_value)
{
- case VK_SUBGROUP_FEATURE_SHUFFLE_BIT:
- return "VK_SUBGROUP_FEATURE_SHUFFLE_BIT";
- case VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT:
- return "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT";
- case VK_SUBGROUP_FEATURE_QUAD_BIT:
- return "VK_SUBGROUP_FEATURE_QUAD_BIT";
+ case VK_SUBGROUP_FEATURE_ARITHMETIC_BIT:
+ return "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT";
case VK_SUBGROUP_FEATURE_BALLOT_BIT:
return "VK_SUBGROUP_FEATURE_BALLOT_BIT";
+ case VK_SUBGROUP_FEATURE_BASIC_BIT:
+ return "VK_SUBGROUP_FEATURE_BASIC_BIT";
case VK_SUBGROUP_FEATURE_CLUSTERED_BIT:
return "VK_SUBGROUP_FEATURE_CLUSTERED_BIT";
- case VK_SUBGROUP_FEATURE_ARITHMETIC_BIT:
- return "VK_SUBGROUP_FEATURE_ARITHMETIC_BIT";
- case VK_SUBGROUP_FEATURE_VOTE_BIT:
- return "VK_SUBGROUP_FEATURE_VOTE_BIT";
case VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV:
return "VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV";
- case VK_SUBGROUP_FEATURE_BASIC_BIT:
- return "VK_SUBGROUP_FEATURE_BASIC_BIT";
+ case VK_SUBGROUP_FEATURE_QUAD_BIT:
+ return "VK_SUBGROUP_FEATURE_QUAD_BIT";
+ case VK_SUBGROUP_FEATURE_SHUFFLE_BIT:
+ return "VK_SUBGROUP_FEATURE_SHUFFLE_BIT";
+ case VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT:
+ return "VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT";
+ case VK_SUBGROUP_FEATURE_VOTE_BIT:
+ return "VK_SUBGROUP_FEATURE_VOTE_BIT";
default:
return "Unhandled VkSubgroupFeatureFlagBits";
}
}
+static inline std::string string_VkSubgroupFeatureFlags(VkSubgroupFeatureFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSubgroupFeatureFlagBits(static_cast<VkSubgroupFeatureFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSubgroupFeatureFlagBits(static_cast<VkSubgroupFeatureFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkPeerMemoryFeatureFlagBits(VkPeerMemoryFeatureFlagBits input_value)
{
switch ((VkPeerMemoryFeatureFlagBits)input_value)
{
- case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT:
- return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT";
+ case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT:
+ return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT";
case VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT:
return "VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT";
+ case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT:
+ return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT";
case VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT:
return "VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT";
- case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT:
- return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT";
default:
return "Unhandled VkPeerMemoryFeatureFlagBits";
}
}
+static inline std::string string_VkPeerMemoryFeatureFlags(VkPeerMemoryFeatureFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkPeerMemoryFeatureFlagBits(static_cast<VkPeerMemoryFeatureFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkPeerMemoryFeatureFlagBits(static_cast<VkPeerMemoryFeatureFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkMemoryAllocateFlagBits(VkMemoryAllocateFlagBits input_value)
{
switch ((VkMemoryAllocateFlagBits)input_value)
{
+ case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT:
+ return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT";
+ case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT:
+ return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT";
case VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT:
return "VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT";
default:
@@ -2580,6 +3778,22 @@ static inline const char* string_VkMemoryAllocateFlagBits(VkMemoryAllocateFlagBi
}
}
+static inline std::string string_VkMemoryAllocateFlags(VkMemoryAllocateFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkMemoryAllocateFlagBits(static_cast<VkMemoryAllocateFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkMemoryAllocateFlagBits(static_cast<VkMemoryAllocateFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkPointClippingBehavior(VkPointClippingBehavior input_value)
{
switch ((VkPointClippingBehavior)input_value)
@@ -2610,16 +3824,16 @@ static inline const char* string_VkSamplerYcbcrModelConversion(VkSamplerYcbcrMod
{
switch ((VkSamplerYcbcrModelConversion)input_value)
{
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY";
case VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY:
return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709";
+ case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020:
+ return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020";
case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601:
return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601";
+ case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709:
+ return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709";
+ case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY:
+ return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY";
default:
return "Unhandled VkSamplerYcbcrModelConversion";
}
@@ -2655,10 +3869,10 @@ static inline const char* string_VkDescriptorUpdateTemplateType(VkDescriptorUpda
{
switch ((VkDescriptorUpdateTemplateType)input_value)
{
- case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR:
- return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR";
case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET:
return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET";
+ case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR:
+ return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR";
default:
return "Unhandled VkDescriptorUpdateTemplateType";
}
@@ -2668,41 +3882,57 @@ static inline const char* string_VkExternalMemoryHandleTypeFlagBits(VkExternalMe
{
switch ((VkExternalMemoryHandleTypeFlagBits)input_value)
{
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
default:
return "Unhandled VkExternalMemoryHandleTypeFlagBits";
}
}
+static inline std::string string_VkExternalMemoryHandleTypeFlags(VkExternalMemoryHandleTypeFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalMemoryHandleTypeFlagBits(static_cast<VkExternalMemoryHandleTypeFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBits(static_cast<VkExternalMemoryHandleTypeFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalMemoryFeatureFlagBits(VkExternalMemoryFeatureFlagBits input_value)
{
switch ((VkExternalMemoryFeatureFlagBits)input_value)
{
- case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT:
- return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT";
case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT:
return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT";
+ case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT:
+ return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT";
case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT:
return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT";
default:
@@ -2710,36 +3940,84 @@ static inline const char* string_VkExternalMemoryFeatureFlagBits(VkExternalMemor
}
}
+static inline std::string string_VkExternalMemoryFeatureFlags(VkExternalMemoryFeatureFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalMemoryFeatureFlagBits(static_cast<VkExternalMemoryFeatureFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBits(static_cast<VkExternalMemoryFeatureFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalFenceHandleTypeFlagBits(VkExternalFenceHandleTypeFlagBits input_value)
{
switch ((VkExternalFenceHandleTypeFlagBits)input_value)
{
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT";
case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT:
return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT";
case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
+ case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+ return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+ case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
+ return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT";
default:
return "Unhandled VkExternalFenceHandleTypeFlagBits";
}
}
+static inline std::string string_VkExternalFenceHandleTypeFlags(VkExternalFenceHandleTypeFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalFenceHandleTypeFlagBits(static_cast<VkExternalFenceHandleTypeFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalFenceHandleTypeFlagBits(static_cast<VkExternalFenceHandleTypeFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalFenceFeatureFlagBits(VkExternalFenceFeatureFlagBits input_value)
{
switch ((VkExternalFenceFeatureFlagBits)input_value)
{
- case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT:
- return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT";
case VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT:
return "VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT";
+ case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT:
+ return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT";
default:
return "Unhandled VkExternalFenceFeatureFlagBits";
}
}
+static inline std::string string_VkExternalFenceFeatureFlags(VkExternalFenceFeatureFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalFenceFeatureFlagBits(static_cast<VkExternalFenceFeatureFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalFenceFeatureFlagBits(static_cast<VkExternalFenceFeatureFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkFenceImportFlagBits(VkFenceImportFlagBits input_value)
{
switch ((VkFenceImportFlagBits)input_value)
@@ -2751,6 +4029,22 @@ static inline const char* string_VkFenceImportFlagBits(VkFenceImportFlagBits inp
}
}
+static inline std::string string_VkFenceImportFlags(VkFenceImportFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkFenceImportFlagBits(static_cast<VkFenceImportFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkFenceImportFlagBits(static_cast<VkFenceImportFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkSemaphoreImportFlagBits(VkSemaphoreImportFlagBits input_value)
{
switch ((VkSemaphoreImportFlagBits)input_value)
@@ -2762,25 +4056,57 @@ static inline const char* string_VkSemaphoreImportFlagBits(VkSemaphoreImportFlag
}
}
+static inline std::string string_VkSemaphoreImportFlags(VkSemaphoreImportFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSemaphoreImportFlagBits(static_cast<VkSemaphoreImportFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSemaphoreImportFlagBits(static_cast<VkSemaphoreImportFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalSemaphoreHandleTypeFlagBits(VkExternalSemaphoreHandleTypeFlagBits input_value)
{
switch ((VkExternalSemaphoreHandleTypeFlagBits)input_value)
{
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT:
return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT";
case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT";
+ case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
+ return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+ case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
+ return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT";
default:
return "Unhandled VkExternalSemaphoreHandleTypeFlagBits";
}
}
+static inline std::string string_VkExternalSemaphoreHandleTypeFlags(VkExternalSemaphoreHandleTypeFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalSemaphoreHandleTypeFlagBits(static_cast<VkExternalSemaphoreHandleTypeFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalSemaphoreHandleTypeFlagBits(static_cast<VkExternalSemaphoreHandleTypeFlagBits>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalSemaphoreFeatureFlagBits(VkExternalSemaphoreFeatureFlagBits input_value)
{
switch ((VkExternalSemaphoreFeatureFlagBits)input_value)
@@ -2794,33 +4120,236 @@ static inline const char* string_VkExternalSemaphoreFeatureFlagBits(VkExternalSe
}
}
+static inline std::string string_VkExternalSemaphoreFeatureFlags(VkExternalSemaphoreFeatureFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalSemaphoreFeatureFlagBits(static_cast<VkExternalSemaphoreFeatureFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalSemaphoreFeatureFlagBits(static_cast<VkExternalSemaphoreFeatureFlagBits>(0)));
+ return ret;
+}
+
+static inline const char* string_VkDriverId(VkDriverId input_value)
+{
+ switch ((VkDriverId)input_value)
+ {
+ case VK_DRIVER_ID_AMD_OPEN_SOURCE:
+ return "VK_DRIVER_ID_AMD_OPEN_SOURCE";
+ case VK_DRIVER_ID_AMD_PROPRIETARY:
+ return "VK_DRIVER_ID_AMD_PROPRIETARY";
+ case VK_DRIVER_ID_ARM_PROPRIETARY:
+ return "VK_DRIVER_ID_ARM_PROPRIETARY";
+ case VK_DRIVER_ID_BROADCOM_PROPRIETARY:
+ return "VK_DRIVER_ID_BROADCOM_PROPRIETARY";
+ case VK_DRIVER_ID_GGP_PROPRIETARY:
+ return "VK_DRIVER_ID_GGP_PROPRIETARY";
+ case VK_DRIVER_ID_GOOGLE_SWIFTSHADER:
+ return "VK_DRIVER_ID_GOOGLE_SWIFTSHADER";
+ case VK_DRIVER_ID_IMAGINATION_PROPRIETARY:
+ return "VK_DRIVER_ID_IMAGINATION_PROPRIETARY";
+ case VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA:
+ return "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA";
+ case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS:
+ return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS";
+ case VK_DRIVER_ID_MESA_RADV:
+ return "VK_DRIVER_ID_MESA_RADV";
+ case VK_DRIVER_ID_NVIDIA_PROPRIETARY:
+ return "VK_DRIVER_ID_NVIDIA_PROPRIETARY";
+ case VK_DRIVER_ID_QUALCOMM_PROPRIETARY:
+ return "VK_DRIVER_ID_QUALCOMM_PROPRIETARY";
+ default:
+ return "Unhandled VkDriverId";
+ }
+}
+
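+// Illustrative note: VkDriverId is a plain enumeration rather than a bitmask,
+// so only this switch is emitted and there is no companion
+// string_VkDriverIdFlags helper. A hypothetical lookup:
+//
+//     const char* name = string_VkDriverId(VK_DRIVER_ID_MESA_RADV);
+//     // name == "VK_DRIVER_ID_MESA_RADV"
+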
+static inline const char* string_VkShaderFloatControlsIndependence(VkShaderFloatControlsIndependence input_value)
+{
+ switch ((VkShaderFloatControlsIndependence)input_value)
+ {
+ case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY:
+ return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY";
+ case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL:
+ return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL";
+ case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE:
+ return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE";
+ default:
+ return "Unhandled VkShaderFloatControlsIndependence";
+ }
+}
+
+static inline const char* string_VkResolveModeFlagBits(VkResolveModeFlagBits input_value)
+{
+ switch ((VkResolveModeFlagBits)input_value)
+ {
+ case VK_RESOLVE_MODE_AVERAGE_BIT:
+ return "VK_RESOLVE_MODE_AVERAGE_BIT";
+ case VK_RESOLVE_MODE_MAX_BIT:
+ return "VK_RESOLVE_MODE_MAX_BIT";
+ case VK_RESOLVE_MODE_MIN_BIT:
+ return "VK_RESOLVE_MODE_MIN_BIT";
+ case VK_RESOLVE_MODE_NONE:
+ return "VK_RESOLVE_MODE_NONE";
+ case VK_RESOLVE_MODE_SAMPLE_ZERO_BIT:
+ return "VK_RESOLVE_MODE_SAMPLE_ZERO_BIT";
+ default:
+ return "Unhandled VkResolveModeFlagBits";
+ }
+}
+
+static inline std::string string_VkResolveModeFlags(VkResolveModeFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkResolveModeFlagBits(static_cast<VkResolveModeFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkResolveModeFlagBits(static_cast<VkResolveModeFlagBits>(0)));
+ return ret;
+}
+
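+// Illustrative note: VK_RESOLVE_MODE_NONE is the zero value of this bitmask,
+// so the empty-mask fallback above is reachable with a meaningful result;
+// most other *Flags helpers report "Unhandled ..." for a zero mask instead:
+//
+//     std::string none = string_VkResolveModeFlags(0);
+//     // none == "VK_RESOLVE_MODE_NONE"
+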
+static inline const char* string_VkDescriptorBindingFlagBits(VkDescriptorBindingFlagBits input_value)
+{
+ switch ((VkDescriptorBindingFlagBits)input_value)
+ {
+ case VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT:
+ return "VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT";
+ case VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT:
+ return "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT";
+ case VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT:
+ return "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT";
+ case VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT:
+ return "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT";
+ default:
+ return "Unhandled VkDescriptorBindingFlagBits";
+ }
+}
+
+static inline std::string string_VkDescriptorBindingFlags(VkDescriptorBindingFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDescriptorBindingFlagBits(static_cast<VkDescriptorBindingFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDescriptorBindingFlagBits(static_cast<VkDescriptorBindingFlagBits>(0)));
+ return ret;
+}
+
+static inline const char* string_VkSamplerReductionMode(VkSamplerReductionMode input_value)
+{
+ switch ((VkSamplerReductionMode)input_value)
+ {
+ case VK_SAMPLER_REDUCTION_MODE_MAX:
+ return "VK_SAMPLER_REDUCTION_MODE_MAX";
+ case VK_SAMPLER_REDUCTION_MODE_MIN:
+ return "VK_SAMPLER_REDUCTION_MODE_MIN";
+ case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE:
+ return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE";
+ default:
+ return "Unhandled VkSamplerReductionMode";
+ }
+}
+
+static inline const char* string_VkSemaphoreType(VkSemaphoreType input_value)
+{
+ switch ((VkSemaphoreType)input_value)
+ {
+ case VK_SEMAPHORE_TYPE_BINARY:
+ return "VK_SEMAPHORE_TYPE_BINARY";
+ case VK_SEMAPHORE_TYPE_TIMELINE:
+ return "VK_SEMAPHORE_TYPE_TIMELINE";
+ default:
+ return "Unhandled VkSemaphoreType";
+ }
+}
+
+static inline const char* string_VkSemaphoreWaitFlagBits(VkSemaphoreWaitFlagBits input_value)
+{
+ switch ((VkSemaphoreWaitFlagBits)input_value)
+ {
+ case VK_SEMAPHORE_WAIT_ANY_BIT:
+ return "VK_SEMAPHORE_WAIT_ANY_BIT";
+ default:
+ return "Unhandled VkSemaphoreWaitFlagBits";
+ }
+}
+
+static inline std::string string_VkSemaphoreWaitFlags(VkSemaphoreWaitFlags input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSemaphoreWaitFlagBits(static_cast<VkSemaphoreWaitFlagBits>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSemaphoreWaitFlagBits(static_cast<VkSemaphoreWaitFlagBits>(0)));
+ return ret;
+}
+
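+// Illustrative note: VkSemaphoreWaitFlagBits currently defines a single bit,
+// so this helper yields either that one name or, for each unknown set bit,
+// one "Unhandled VkSemaphoreWaitFlagBits" entry:
+//
+//     string_VkSemaphoreWaitFlags(VK_SEMAPHORE_WAIT_ANY_BIT)
+//     // == "VK_SEMAPHORE_WAIT_ANY_BIT"
+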
static inline const char* string_VkSurfaceTransformFlagBitsKHR(VkSurfaceTransformFlagBitsKHR input_value)
{
switch ((VkSurfaceTransformFlagBitsKHR)input_value)
{
- case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR";
- case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR";
- case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR";
- case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR";
- case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR";
case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR:
return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR";
- case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
- return "VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR";
case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR:
return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR";
+ case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR:
+ return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR";
+ case VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR:
+ return "VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR";
+ case VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR:
+ return "VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR";
case VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR:
return "VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR";
+ case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
+ return "VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR";
+ case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
+ return "VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR";
+ case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
+ return "VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR";
default:
return "Unhandled VkSurfaceTransformFlagBitsKHR";
}
}
+static inline std::string string_VkSurfaceTransformFlagsKHR(VkSurfaceTransformFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSurfaceTransformFlagBitsKHR(static_cast<VkSurfaceTransformFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSurfaceTransformFlagBitsKHR(static_cast<VkSurfaceTransformFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkCompositeAlphaFlagBitsKHR(VkCompositeAlphaFlagBitsKHR input_value)
{
switch ((VkCompositeAlphaFlagBitsKHR)input_value)
@@ -2838,40 +4367,58 @@ static inline const char* string_VkCompositeAlphaFlagBitsKHR(VkCompositeAlphaFla
}
}
+static inline std::string string_VkCompositeAlphaFlagsKHR(VkCompositeAlphaFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast<VkCompositeAlphaFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkCompositeAlphaFlagBitsKHR(static_cast<VkCompositeAlphaFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkColorSpaceKHR(VkColorSpaceKHR input_value)
{
switch ((VkColorSpaceKHR)input_value)
{
- case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:
- return "VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT";
- case VK_COLOR_SPACE_DCI_P3_LINEAR_EXT:
- return "VK_COLOR_SPACE_DCI_P3_LINEAR_EXT";
+ case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT:
+ return "VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT";
case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT:
return "VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT";
+ case VK_COLOR_SPACE_BT2020_LINEAR_EXT:
+ return "VK_COLOR_SPACE_BT2020_LINEAR_EXT";
+ case VK_COLOR_SPACE_BT709_LINEAR_EXT:
+ return "VK_COLOR_SPACE_BT709_LINEAR_EXT";
case VK_COLOR_SPACE_BT709_NONLINEAR_EXT:
return "VK_COLOR_SPACE_BT709_NONLINEAR_EXT";
case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT:
return "VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT";
+ case VK_COLOR_SPACE_DISPLAY_NATIVE_AMD:
+ return "VK_COLOR_SPACE_DISPLAY_NATIVE_AMD";
+ case VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT:
+ return "VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT";
+ case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT:
+ return "VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT";
+ case VK_COLOR_SPACE_DOLBYVISION_EXT:
+ return "VK_COLOR_SPACE_DOLBYVISION_EXT";
+ case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:
+ return "VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT";
case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT:
return "VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT";
case VK_COLOR_SPACE_HDR10_HLG_EXT:
return "VK_COLOR_SPACE_HDR10_HLG_EXT";
- case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:
- return "VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT";
- case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT:
- return "VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT";
- case VK_COLOR_SPACE_PASS_THROUGH_EXT:
- return "VK_COLOR_SPACE_PASS_THROUGH_EXT";
case VK_COLOR_SPACE_HDR10_ST2084_EXT:
return "VK_COLOR_SPACE_HDR10_ST2084_EXT";
+ case VK_COLOR_SPACE_PASS_THROUGH_EXT:
+ return "VK_COLOR_SPACE_PASS_THROUGH_EXT";
case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR:
return "VK_COLOR_SPACE_SRGB_NONLINEAR_KHR";
- case VK_COLOR_SPACE_BT2020_LINEAR_EXT:
- return "VK_COLOR_SPACE_BT2020_LINEAR_EXT";
- case VK_COLOR_SPACE_BT709_LINEAR_EXT:
- return "VK_COLOR_SPACE_BT709_LINEAR_EXT";
- case VK_COLOR_SPACE_DOLBYVISION_EXT:
- return "VK_COLOR_SPACE_DOLBYVISION_EXT";
default:
return "Unhandled VkColorSpaceKHR";
}
@@ -2883,16 +4430,16 @@ static inline const char* string_VkPresentModeKHR(VkPresentModeKHR input_value)
{
case VK_PRESENT_MODE_FIFO_KHR:
return "VK_PRESENT_MODE_FIFO_KHR";
- case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
- return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR";
- case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
- return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR";
- case VK_PRESENT_MODE_MAILBOX_KHR:
- return "VK_PRESENT_MODE_MAILBOX_KHR";
- case VK_PRESENT_MODE_IMMEDIATE_KHR:
- return "VK_PRESENT_MODE_IMMEDIATE_KHR";
case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
return "VK_PRESENT_MODE_FIFO_RELAXED_KHR";
+ case VK_PRESENT_MODE_IMMEDIATE_KHR:
+ return "VK_PRESENT_MODE_IMMEDIATE_KHR";
+ case VK_PRESENT_MODE_MAILBOX_KHR:
+ return "VK_PRESENT_MODE_MAILBOX_KHR";
+ case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
+ return "VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR";
+ case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
+ return "VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR";
default:
return "Unhandled VkPresentModeKHR";
}
@@ -2902,6 +4449,8 @@ static inline const char* string_VkSwapchainCreateFlagBitsKHR(VkSwapchainCreateF
{
switch ((VkSwapchainCreateFlagBitsKHR)input_value)
{
+ case VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR:
+ return "VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR";
case VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR:
return "VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR";
case VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR:
@@ -2911,33 +4460,65 @@ static inline const char* string_VkSwapchainCreateFlagBitsKHR(VkSwapchainCreateF
}
}
+static inline std::string string_VkSwapchainCreateFlagsKHR(VkSwapchainCreateFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSwapchainCreateFlagBitsKHR(static_cast<VkSwapchainCreateFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSwapchainCreateFlagBitsKHR(static_cast<VkSwapchainCreateFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkDeviceGroupPresentModeFlagBitsKHR(VkDeviceGroupPresentModeFlagBitsKHR input_value)
{
switch ((VkDeviceGroupPresentModeFlagBitsKHR)input_value)
{
- case VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR:
- return "VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR";
- case VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR:
- return "VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR";
- case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR:
- return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR";
case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR:
return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR";
+ case VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR:
+ return "VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR";
+ case VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR:
+ return "VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR";
+ case VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR:
+ return "VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR";
default:
return "Unhandled VkDeviceGroupPresentModeFlagBitsKHR";
}
}
+static inline std::string string_VkDeviceGroupPresentModeFlagsKHR(VkDeviceGroupPresentModeFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDeviceGroupPresentModeFlagBitsKHR(static_cast<VkDeviceGroupPresentModeFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDeviceGroupPresentModeFlagBitsKHR(static_cast<VkDeviceGroupPresentModeFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkDisplayPlaneAlphaFlagBitsKHR(VkDisplayPlaneAlphaFlagBitsKHR input_value)
{
switch ((VkDisplayPlaneAlphaFlagBitsKHR)input_value)
{
- case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR:
- return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR";
case VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR:
return "VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR";
case VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR:
return "VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR";
+ case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR:
+ return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR";
case VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR:
return "VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR";
default:
@@ -2945,27 +4526,63 @@ static inline const char* string_VkDisplayPlaneAlphaFlagBitsKHR(VkDisplayPlaneAl
}
}
+static inline std::string string_VkDisplayPlaneAlphaFlagsKHR(VkDisplayPlaneAlphaFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDisplayPlaneAlphaFlagBitsKHR(static_cast<VkDisplayPlaneAlphaFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDisplayPlaneAlphaFlagBitsKHR(static_cast<VkDisplayPlaneAlphaFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkPeerMemoryFeatureFlagBitsKHR(VkPeerMemoryFeatureFlagBitsKHR input_value)
{
switch ((VkPeerMemoryFeatureFlagBitsKHR)input_value)
{
- case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT:
- return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT";
+ case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT:
+ return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT";
case VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT:
return "VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT";
+ case VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT:
+ return "VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT";
case VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT:
return "VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT";
- case VK_PEER_MEMORY_FEATURE_COPY_DST_BIT:
- return "VK_PEER_MEMORY_FEATURE_COPY_DST_BIT";
default:
return "Unhandled VkPeerMemoryFeatureFlagBitsKHR";
}
}
+static inline std::string string_VkPeerMemoryFeatureFlagsKHR(VkPeerMemoryFeatureFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkPeerMemoryFeatureFlagBitsKHR(static_cast<VkPeerMemoryFeatureFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkPeerMemoryFeatureFlagBitsKHR(static_cast<VkPeerMemoryFeatureFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkMemoryAllocateFlagBitsKHR(VkMemoryAllocateFlagBitsKHR input_value)
{
switch ((VkMemoryAllocateFlagBitsKHR)input_value)
{
+ case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT:
+ return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT";
+ case VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT:
+ return "VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT";
case VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT:
return "VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT";
default:
@@ -2973,45 +4590,77 @@ static inline const char* string_VkMemoryAllocateFlagBitsKHR(VkMemoryAllocateFla
}
}
+static inline std::string string_VkMemoryAllocateFlagsKHR(VkMemoryAllocateFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkMemoryAllocateFlagBitsKHR(static_cast<VkMemoryAllocateFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkMemoryAllocateFlagBitsKHR(static_cast<VkMemoryAllocateFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalMemoryHandleTypeFlagBitsKHR(VkExternalMemoryHandleTypeFlagBitsKHR input_value)
{
switch ((VkExternalMemoryHandleTypeFlagBitsKHR)input_value)
{
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
default:
return "Unhandled VkExternalMemoryHandleTypeFlagBitsKHR";
}
}
+static inline std::string string_VkExternalMemoryHandleTypeFlagsKHR(VkExternalMemoryHandleTypeFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalMemoryHandleTypeFlagBitsKHR(static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBitsKHR(static_cast<VkExternalMemoryHandleTypeFlagBitsKHR>(0)));
+ return ret;
+}
+
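+// Illustrative note (assumption based on vulkan_core.h): the ...KHR bitmask
+// types handled here are promoted aliases of the core Vulkan 1.1 types, which
+// is why the case labels use the un-suffixed core enumerant names; both
+// spellings denote the same values:
+//
+//     static_assert(VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ==
+//                   VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
+//                   "KHR alias of the core enumerant");
+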
static inline const char* string_VkExternalMemoryFeatureFlagBitsKHR(VkExternalMemoryFeatureFlagBitsKHR input_value)
{
switch ((VkExternalMemoryFeatureFlagBitsKHR)input_value)
{
- case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT:
- return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT";
case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT:
return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT";
+ case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT:
+ return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT";
case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT:
return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT";
default:
@@ -3019,25 +4668,57 @@ static inline const char* string_VkExternalMemoryFeatureFlagBitsKHR(VkExternalMe
}
}
+static inline std::string string_VkExternalMemoryFeatureFlagsKHR(VkExternalMemoryFeatureFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalMemoryFeatureFlagBitsKHR(static_cast<VkExternalMemoryFeatureFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBitsKHR(static_cast<VkExternalMemoryFeatureFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalSemaphoreHandleTypeFlagBitsKHR(VkExternalSemaphoreHandleTypeFlagBitsKHR input_value)
{
switch ((VkExternalSemaphoreHandleTypeFlagBitsKHR)input_value)
{
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT";
- case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
- return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT:
return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT";
case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT";
+ case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
+ return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+ case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
+ return "VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT";
default:
return "Unhandled VkExternalSemaphoreHandleTypeFlagBitsKHR";
}
}
+static inline std::string string_VkExternalSemaphoreHandleTypeFlagsKHR(VkExternalSemaphoreHandleTypeFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalSemaphoreHandleTypeFlagBitsKHR(static_cast<VkExternalSemaphoreHandleTypeFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalSemaphoreHandleTypeFlagBitsKHR(static_cast<VkExternalSemaphoreHandleTypeFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalSemaphoreFeatureFlagBitsKHR(VkExternalSemaphoreFeatureFlagBitsKHR input_value)
{
switch ((VkExternalSemaphoreFeatureFlagBitsKHR)input_value)
@@ -3051,6 +4732,22 @@ static inline const char* string_VkExternalSemaphoreFeatureFlagBitsKHR(VkExterna
}
}
+static inline std::string string_VkExternalSemaphoreFeatureFlagsKHR(VkExternalSemaphoreFeatureFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalSemaphoreFeatureFlagBitsKHR(static_cast<VkExternalSemaphoreFeatureFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalSemaphoreFeatureFlagBitsKHR(static_cast<VkExternalSemaphoreFeatureFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkSemaphoreImportFlagBitsKHR(VkSemaphoreImportFlagBitsKHR input_value)
{
switch ((VkSemaphoreImportFlagBitsKHR)input_value)
@@ -3062,14 +4759,30 @@ static inline const char* string_VkSemaphoreImportFlagBitsKHR(VkSemaphoreImportF
}
}
+static inline std::string string_VkSemaphoreImportFlagsKHR(VkSemaphoreImportFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSemaphoreImportFlagBitsKHR(static_cast<VkSemaphoreImportFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSemaphoreImportFlagBitsKHR(static_cast<VkSemaphoreImportFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkDescriptorUpdateTemplateTypeKHR(VkDescriptorUpdateTemplateTypeKHR input_value)
{
switch ((VkDescriptorUpdateTemplateTypeKHR)input_value)
{
- case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR:
- return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR";
case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET:
return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET";
+ case VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR:
+ return "VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR";
default:
return "Unhandled VkDescriptorUpdateTemplateTypeKHR";
}
@@ -3079,32 +4792,64 @@ static inline const char* string_VkExternalFenceHandleTypeFlagBitsKHR(VkExternal
{
switch ((VkExternalFenceHandleTypeFlagBitsKHR)input_value)
{
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
- case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
- return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT";
case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT:
return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT";
case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT:
return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT";
+ case VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT:
+ return "VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT";
+ case VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT:
+ return "VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT";
default:
return "Unhandled VkExternalFenceHandleTypeFlagBitsKHR";
}
}
+static inline std::string string_VkExternalFenceHandleTypeFlagsKHR(VkExternalFenceHandleTypeFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalFenceHandleTypeFlagBitsKHR(static_cast<VkExternalFenceHandleTypeFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalFenceHandleTypeFlagBitsKHR(static_cast<VkExternalFenceHandleTypeFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalFenceFeatureFlagBitsKHR(VkExternalFenceFeatureFlagBitsKHR input_value)
{
switch ((VkExternalFenceFeatureFlagBitsKHR)input_value)
{
- case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT:
- return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT";
case VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT:
return "VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT";
+ case VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT:
+ return "VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT";
default:
return "Unhandled VkExternalFenceFeatureFlagBitsKHR";
}
}
+static inline std::string string_VkExternalFenceFeatureFlagsKHR(VkExternalFenceFeatureFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalFenceFeatureFlagBitsKHR(static_cast<VkExternalFenceFeatureFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalFenceFeatureFlagBitsKHR(static_cast<VkExternalFenceFeatureFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkFenceImportFlagBitsKHR(VkFenceImportFlagBitsKHR input_value)
{
switch ((VkFenceImportFlagBitsKHR)input_value)
@@ -3116,6 +4861,118 @@ static inline const char* string_VkFenceImportFlagBitsKHR(VkFenceImportFlagBitsK
}
}
+static inline std::string string_VkFenceImportFlagsKHR(VkFenceImportFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkFenceImportFlagBitsKHR(static_cast<VkFenceImportFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkFenceImportFlagBitsKHR(static_cast<VkFenceImportFlagBitsKHR>(0)));
+ return ret;
+}
+
+static inline const char* string_VkPerformanceCounterUnitKHR(VkPerformanceCounterUnitKHR input_value)
+{
+ switch ((VkPerformanceCounterUnitKHR)input_value)
+ {
+ case VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR";
+ case VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR:
+ return "VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR";
+ default:
+ return "Unhandled VkPerformanceCounterUnitKHR";
+ }
+}
+
+static inline const char* string_VkPerformanceCounterScopeKHR(VkPerformanceCounterScopeKHR input_value)
+{
+ switch ((VkPerformanceCounterScopeKHR)input_value)
+ {
+ case VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR:
+ return "VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR";
+ case VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR:
+ return "VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR";
+ case VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR:
+ return "VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR";
+ default:
+ return "Unhandled VkPerformanceCounterScopeKHR";
+ }
+}
+
+static inline const char* string_VkPerformanceCounterStorageKHR(VkPerformanceCounterStorageKHR input_value)
+{
+ switch ((VkPerformanceCounterStorageKHR)input_value)
+ {
+ case VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR:
+ return "VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR";
+ case VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR:
+ return "VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR";
+ case VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR:
+ return "VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR";
+ case VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR:
+ return "VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR";
+ case VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR:
+ return "VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR";
+ case VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR:
+ return "VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR";
+ default:
+ return "Unhandled VkPerformanceCounterStorageKHR";
+ }
+}
+
+static inline const char* string_VkPerformanceCounterDescriptionFlagBitsKHR(VkPerformanceCounterDescriptionFlagBitsKHR input_value)
+{
+ switch ((VkPerformanceCounterDescriptionFlagBitsKHR)input_value)
+ {
+ case VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR:
+ return "VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR";
+ case VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR:
+ return "VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR";
+ default:
+ return "Unhandled VkPerformanceCounterDescriptionFlagBitsKHR";
+ }
+}
+
+static inline std::string string_VkPerformanceCounterDescriptionFlagsKHR(VkPerformanceCounterDescriptionFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkPerformanceCounterDescriptionFlagBitsKHR(static_cast<VkPerformanceCounterDescriptionFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkPerformanceCounterDescriptionFlagBitsKHR(static_cast<VkPerformanceCounterDescriptionFlagBitsKHR>(0)));
+ return ret;
+}
+
static inline const char* string_VkPointClippingBehaviorKHR(VkPointClippingBehaviorKHR input_value)
{
switch ((VkPointClippingBehaviorKHR)input_value)
@@ -3146,16 +5003,16 @@ static inline const char* string_VkSamplerYcbcrModelConversionKHR(VkSamplerYcbcr
{
switch ((VkSamplerYcbcrModelConversionKHR)input_value)
{
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY";
case VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY:
return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY";
- case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709:
- return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709";
+ case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020:
+ return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020";
case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601:
return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601";
+ case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709:
+ return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709";
+ case VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY:
+ return "VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY";
default:
return "Unhandled VkSamplerYcbcrModelConversionKHR";
}
@@ -3187,82 +5044,224 @@ static inline const char* string_VkChromaLocationKHR(VkChromaLocationKHR input_v
}
}
+static inline const char* string_VkDriverIdKHR(VkDriverIdKHR input_value)
+{
+ switch ((VkDriverIdKHR)input_value)
+ {
+ case VK_DRIVER_ID_AMD_OPEN_SOURCE:
+ return "VK_DRIVER_ID_AMD_OPEN_SOURCE";
+ case VK_DRIVER_ID_AMD_PROPRIETARY:
+ return "VK_DRIVER_ID_AMD_PROPRIETARY";
+ case VK_DRIVER_ID_ARM_PROPRIETARY:
+ return "VK_DRIVER_ID_ARM_PROPRIETARY";
+ case VK_DRIVER_ID_BROADCOM_PROPRIETARY:
+ return "VK_DRIVER_ID_BROADCOM_PROPRIETARY";
+ case VK_DRIVER_ID_GGP_PROPRIETARY:
+ return "VK_DRIVER_ID_GGP_PROPRIETARY";
+ case VK_DRIVER_ID_GOOGLE_SWIFTSHADER:
+ return "VK_DRIVER_ID_GOOGLE_SWIFTSHADER";
+ case VK_DRIVER_ID_IMAGINATION_PROPRIETARY:
+ return "VK_DRIVER_ID_IMAGINATION_PROPRIETARY";
+ case VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA:
+ return "VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA";
+ case VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS:
+ return "VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS";
+ case VK_DRIVER_ID_MESA_RADV:
+ return "VK_DRIVER_ID_MESA_RADV";
+ case VK_DRIVER_ID_NVIDIA_PROPRIETARY:
+ return "VK_DRIVER_ID_NVIDIA_PROPRIETARY";
+ case VK_DRIVER_ID_QUALCOMM_PROPRIETARY:
+ return "VK_DRIVER_ID_QUALCOMM_PROPRIETARY";
+ default:
+ return "Unhandled VkDriverIdKHR";
+ }
+}
+
+static inline const char* string_VkShaderFloatControlsIndependenceKHR(VkShaderFloatControlsIndependenceKHR input_value)
+{
+ switch ((VkShaderFloatControlsIndependenceKHR)input_value)
+ {
+ case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY:
+ return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY";
+ case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL:
+ return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL";
+ case VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE:
+ return "VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE";
+ default:
+ return "Unhandled VkShaderFloatControlsIndependenceKHR";
+ }
+}
+
+static inline const char* string_VkResolveModeFlagBitsKHR(VkResolveModeFlagBitsKHR input_value)
+{
+ switch ((VkResolveModeFlagBitsKHR)input_value)
+ {
+ case VK_RESOLVE_MODE_AVERAGE_BIT:
+ return "VK_RESOLVE_MODE_AVERAGE_BIT";
+ case VK_RESOLVE_MODE_MAX_BIT:
+ return "VK_RESOLVE_MODE_MAX_BIT";
+ case VK_RESOLVE_MODE_MIN_BIT:
+ return "VK_RESOLVE_MODE_MIN_BIT";
+ case VK_RESOLVE_MODE_NONE:
+ return "VK_RESOLVE_MODE_NONE";
+ case VK_RESOLVE_MODE_SAMPLE_ZERO_BIT:
+ return "VK_RESOLVE_MODE_SAMPLE_ZERO_BIT";
+ default:
+ return "Unhandled VkResolveModeFlagBitsKHR";
+ }
+}
+
+static inline std::string string_VkResolveModeFlagsKHR(VkResolveModeFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkResolveModeFlagBitsKHR(static_cast<VkResolveModeFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkResolveModeFlagBitsKHR(static_cast<VkResolveModeFlagBitsKHR>(0)));
+ return ret;
+}
+
+static inline const char* string_VkSemaphoreTypeKHR(VkSemaphoreTypeKHR input_value)
+{
+ switch ((VkSemaphoreTypeKHR)input_value)
+ {
+ case VK_SEMAPHORE_TYPE_BINARY:
+ return "VK_SEMAPHORE_TYPE_BINARY";
+ case VK_SEMAPHORE_TYPE_TIMELINE:
+ return "VK_SEMAPHORE_TYPE_TIMELINE";
+ default:
+ return "Unhandled VkSemaphoreTypeKHR";
+ }
+}
+
+static inline const char* string_VkSemaphoreWaitFlagBitsKHR(VkSemaphoreWaitFlagBitsKHR input_value)
+{
+ switch ((VkSemaphoreWaitFlagBitsKHR)input_value)
+ {
+ case VK_SEMAPHORE_WAIT_ANY_BIT:
+ return "VK_SEMAPHORE_WAIT_ANY_BIT";
+ default:
+ return "Unhandled VkSemaphoreWaitFlagBitsKHR";
+ }
+}
+
+static inline std::string string_VkSemaphoreWaitFlagsKHR(VkSemaphoreWaitFlagsKHR input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSemaphoreWaitFlagBitsKHR(static_cast<VkSemaphoreWaitFlagBitsKHR>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSemaphoreWaitFlagBitsKHR(static_cast<VkSemaphoreWaitFlagBitsKHR>(0)));
+ return ret;
+}
+
+static inline const char* string_VkPipelineExecutableStatisticFormatKHR(VkPipelineExecutableStatisticFormatKHR input_value)
+{
+ switch ((VkPipelineExecutableStatisticFormatKHR)input_value)
+ {
+ case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR:
+ return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR";
+ case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR:
+ return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR";
+ case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR:
+ return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR";
+ case VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR:
+ return "VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR";
+ default:
+ return "Unhandled VkPipelineExecutableStatisticFormatKHR";
+ }
+}
+
static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectTypeEXT input_value)
{
switch ((VkDebugReportObjectTypeEXT)input_value)
{
- case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT";
case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT:
return "VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT";
default:
return "Unhandled VkDebugReportObjectTypeEXT";
}
@@ -3278,23 +5277,39 @@ static inline const char* string_VkDebugReportFlagBitsEXT(VkDebugReportFlagBitsE
return "VK_DEBUG_REPORT_ERROR_BIT_EXT";
case VK_DEBUG_REPORT_INFORMATION_BIT_EXT:
return "VK_DEBUG_REPORT_INFORMATION_BIT_EXT";
- case VK_DEBUG_REPORT_WARNING_BIT_EXT:
- return "VK_DEBUG_REPORT_WARNING_BIT_EXT";
case VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT:
return "VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT";
+ case VK_DEBUG_REPORT_WARNING_BIT_EXT:
+ return "VK_DEBUG_REPORT_WARNING_BIT_EXT";
default:
return "Unhandled VkDebugReportFlagBitsEXT";
}
}
+static inline std::string string_VkDebugReportFlagsEXT(VkDebugReportFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDebugReportFlagBitsEXT(static_cast<VkDebugReportFlagBitsEXT>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDebugReportFlagBitsEXT(static_cast<VkDebugReportFlagBitsEXT>(0)));
+ return ret;
+}
+
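+// Illustrative use (hypothetical callback, not part of this header): the
+// flags parameter of a VK_EXT_debug_report callback arrives as a mask, so the
+// combined stringifier is the natural formatter:
+//
+//     VKAPI_ATTR VkBool32 VKAPI_CALL debug_cb(
+//             VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType,
+//             uint64_t object, size_t location, int32_t messageCode,
+//             const char* pLayerPrefix, const char* pMessage, void* pUserData) {
+//         fprintf(stderr, "[%s] %s: %s\n",
+//                 string_VkDebugReportFlagsEXT(flags).c_str(),
+//                 string_VkDebugReportObjectTypeEXT(objType), pMessage);
+//         return VK_FALSE; // do not abort the call that triggered the report
+//     }
+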
static inline const char* string_VkRasterizationOrderAMD(VkRasterizationOrderAMD input_value)
{
switch ((VkRasterizationOrderAMD)input_value)
{
- case VK_RASTERIZATION_ORDER_STRICT_AMD:
- return "VK_RASTERIZATION_ORDER_STRICT_AMD";
case VK_RASTERIZATION_ORDER_RELAXED_AMD:
return "VK_RASTERIZATION_ORDER_RELAXED_AMD";
+ case VK_RASTERIZATION_ORDER_STRICT_AMD:
+ return "VK_RASTERIZATION_ORDER_STRICT_AMD";
default:
return "Unhandled VkRasterizationOrderAMD";
}
@@ -3304,12 +5319,12 @@ static inline const char* string_VkShaderInfoTypeAMD(VkShaderInfoTypeAMD input_v
{
switch ((VkShaderInfoTypeAMD)input_value)
{
- case VK_SHADER_INFO_TYPE_STATISTICS_AMD:
- return "VK_SHADER_INFO_TYPE_STATISTICS_AMD";
case VK_SHADER_INFO_TYPE_BINARY_AMD:
return "VK_SHADER_INFO_TYPE_BINARY_AMD";
case VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD:
return "VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD";
+ case VK_SHADER_INFO_TYPE_STATISTICS_AMD:
+ return "VK_SHADER_INFO_TYPE_STATISTICS_AMD";
default:
return "Unhandled VkShaderInfoTypeAMD";
}
@@ -3319,97 +5334,188 @@ static inline const char* string_VkExternalMemoryHandleTypeFlagBitsNV(VkExternal
{
switch ((VkExternalMemoryHandleTypeFlagBitsNV)input_value)
{
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV";
- case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV:
- return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV";
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV:
return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV";
+ case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV:
+ return "VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV";
default:
return "Unhandled VkExternalMemoryHandleTypeFlagBitsNV";
}
}
+static inline std::string string_VkExternalMemoryHandleTypeFlagsNV(VkExternalMemoryHandleTypeFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalMemoryHandleTypeFlagBitsNV(static_cast<VkExternalMemoryHandleTypeFlagBitsNV>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalMemoryHandleTypeFlagBitsNV(static_cast<VkExternalMemoryHandleTypeFlagBitsNV>(0)));
+ return ret;
+}
+
static inline const char* string_VkExternalMemoryFeatureFlagBitsNV(VkExternalMemoryFeatureFlagBitsNV input_value)
{
switch ((VkExternalMemoryFeatureFlagBitsNV)input_value)
{
- case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV:
- return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV";
- case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV:
- return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV";
case VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV:
return "VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV";
+ case VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV:
+ return "VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV";
+ case VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV:
+ return "VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV";
default:
return "Unhandled VkExternalMemoryFeatureFlagBitsNV";
}
}
+static inline std::string string_VkExternalMemoryFeatureFlagsNV(VkExternalMemoryFeatureFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkExternalMemoryFeatureFlagBitsNV(static_cast<VkExternalMemoryFeatureFlagBitsNV>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkExternalMemoryFeatureFlagBitsNV(static_cast<VkExternalMemoryFeatureFlagBitsNV>(0)));
+ return ret;
+}
+
static inline const char* string_VkValidationCheckEXT(VkValidationCheckEXT input_value)
{
switch ((VkValidationCheckEXT)input_value)
{
- case VK_VALIDATION_CHECK_SHADERS_EXT:
- return "VK_VALIDATION_CHECK_SHADERS_EXT";
case VK_VALIDATION_CHECK_ALL_EXT:
return "VK_VALIDATION_CHECK_ALL_EXT";
+ case VK_VALIDATION_CHECK_SHADERS_EXT:
+ return "VK_VALIDATION_CHECK_SHADERS_EXT";
default:
return "Unhandled VkValidationCheckEXT";
}
}
+static inline const char* string_VkConditionalRenderingFlagBitsEXT(VkConditionalRenderingFlagBitsEXT input_value)
+{
+ switch ((VkConditionalRenderingFlagBitsEXT)input_value)
+ {
+ case VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT:
+ return "VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT";
+ default:
+ return "Unhandled VkConditionalRenderingFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkConditionalRenderingFlagsEXT(VkConditionalRenderingFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkConditionalRenderingFlagBitsEXT(static_cast<VkConditionalRenderingFlagBitsEXT>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkConditionalRenderingFlagBitsEXT(static_cast<VkConditionalRenderingFlagBitsEXT>(0)));
+ return ret;
+}
+
static inline const char* string_VkIndirectCommandsLayoutUsageFlagBitsNVX(VkIndirectCommandsLayoutUsageFlagBitsNVX input_value)
{
switch ((VkIndirectCommandsLayoutUsageFlagBitsNVX)input_value)
{
+ case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX:
+ return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX";
+ case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX:
+ return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX";
case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX:
return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX";
case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX:
return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX";
- case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX:
- return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX";
- case VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX:
- return "VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX";
default:
return "Unhandled VkIndirectCommandsLayoutUsageFlagBitsNVX";
}
}
+static inline std::string string_VkIndirectCommandsLayoutUsageFlagsNVX(VkIndirectCommandsLayoutUsageFlagsNVX input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNVX(static_cast<VkIndirectCommandsLayoutUsageFlagBitsNVX>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkIndirectCommandsLayoutUsageFlagBitsNVX(static_cast<VkIndirectCommandsLayoutUsageFlagBitsNVX>(0)));
+ return ret;
+}
+
static inline const char* string_VkObjectEntryUsageFlagBitsNVX(VkObjectEntryUsageFlagBitsNVX input_value)
{
switch ((VkObjectEntryUsageFlagBitsNVX)input_value)
{
- case VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX:
- return "VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX";
case VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX:
return "VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX";
+ case VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX:
+ return "VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX";
default:
return "Unhandled VkObjectEntryUsageFlagBitsNVX";
}
}
+static inline std::string string_VkObjectEntryUsageFlagsNVX(VkObjectEntryUsageFlagsNVX input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkObjectEntryUsageFlagBitsNVX(static_cast<VkObjectEntryUsageFlagBitsNVX>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkObjectEntryUsageFlagBitsNVX(static_cast<VkObjectEntryUsageFlagBitsNVX>(0)));
+ return ret;
+}
+
static inline const char* string_VkIndirectCommandsTokenTypeNVX(VkIndirectCommandsTokenTypeNVX input_value)
{
switch ((VkIndirectCommandsTokenTypeNVX)input_value)
{
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX";
case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX:
return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX";
case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX:
return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX";
- case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX:
- return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX";
+ case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX:
+ return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX";
case VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX:
return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX";
case VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX:
return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX";
+ case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX:
+ return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX";
+ case VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX:
+ return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX";
+ case VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX:
+ return "VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX";
default:
return "Unhandled VkIndirectCommandsTokenTypeNVX";
}
@@ -3419,16 +5525,16 @@ static inline const char* string_VkObjectEntryTypeNVX(VkObjectEntryTypeNVX input
{
switch ((VkObjectEntryTypeNVX)input_value)
{
- case VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX:
- return "VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX";
case VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX:
return "VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX";
- case VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX:
- return "VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX";
+ case VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX:
+ return "VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX";
case VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX:
return "VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX";
case VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX:
return "VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX";
+ case VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX:
+ return "VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX";
default:
return "Unhandled VkObjectEntryTypeNVX";
}
@@ -3445,16 +5551,32 @@ static inline const char* string_VkSurfaceCounterFlagBitsEXT(VkSurfaceCounterFla
}
}
+static inline std::string string_VkSurfaceCounterFlagsEXT(VkSurfaceCounterFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkSurfaceCounterFlagBitsEXT(static_cast<VkSurfaceCounterFlagBitsEXT>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkSurfaceCounterFlagBitsEXT(static_cast<VkSurfaceCounterFlagBitsEXT>(0)));
+ return ret;
+}
+
static inline const char* string_VkDisplayPowerStateEXT(VkDisplayPowerStateEXT input_value)
{
switch ((VkDisplayPowerStateEXT)input_value)
{
- case VK_DISPLAY_POWER_STATE_SUSPEND_EXT:
- return "VK_DISPLAY_POWER_STATE_SUSPEND_EXT";
- case VK_DISPLAY_POWER_STATE_ON_EXT:
- return "VK_DISPLAY_POWER_STATE_ON_EXT";
case VK_DISPLAY_POWER_STATE_OFF_EXT:
return "VK_DISPLAY_POWER_STATE_OFF_EXT";
+ case VK_DISPLAY_POWER_STATE_ON_EXT:
+ return "VK_DISPLAY_POWER_STATE_ON_EXT";
+ case VK_DISPLAY_POWER_STATE_SUSPEND_EXT:
+ return "VK_DISPLAY_POWER_STATE_SUSPEND_EXT";
default:
return "Unhandled VkDisplayPowerStateEXT";
}
@@ -3486,22 +5608,22 @@ static inline const char* string_VkViewportCoordinateSwizzleNV(VkViewportCoordin
{
switch ((VkViewportCoordinateSwizzleNV)input_value)
{
+ case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV:
+ return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV";
+ case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV:
+ return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV";
case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV:
return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV";
case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV:
return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV";
case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV:
return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV";
- case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV:
- return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV";
+ case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV:
+ return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV";
case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV:
return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV";
+ case VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV:
+ return "VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV";
default:
return "Unhandled VkViewportCoordinateSwizzleNV";
}
@@ -3524,10 +5646,10 @@ static inline const char* string_VkConservativeRasterizationModeEXT(VkConservati
{
switch ((VkConservativeRasterizationModeEXT)input_value)
{
- case VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT:
- return "VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT";
case VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT:
return "VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT";
+ case VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT:
+ return "VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT";
case VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT:
return "VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT";
default:
@@ -3543,40 +5665,72 @@ static inline const char* string_VkDebugUtilsMessageSeverityFlagBitsEXT(VkDebugU
return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT";
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT";
- case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT";
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT";
+ case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
+ return "VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT";
default:
return "Unhandled VkDebugUtilsMessageSeverityFlagBitsEXT";
}
}
+static inline std::string string_VkDebugUtilsMessageSeverityFlagsEXT(VkDebugUtilsMessageSeverityFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDebugUtilsMessageSeverityFlagBitsEXT(static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDebugUtilsMessageSeverityFlagBitsEXT(static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>(0)));
+ return ret;
+}
+
static inline const char* string_VkDebugUtilsMessageTypeFlagBitsEXT(VkDebugUtilsMessageTypeFlagBitsEXT input_value)
{
switch ((VkDebugUtilsMessageTypeFlagBitsEXT)input_value)
{
+ case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
+ return "VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT";
case VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
return "VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT";
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT:
return "VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT";
- case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
- return "VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT";
default:
return "Unhandled VkDebugUtilsMessageTypeFlagBitsEXT";
}
}
+static inline std::string string_VkDebugUtilsMessageTypeFlagsEXT(VkDebugUtilsMessageTypeFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDebugUtilsMessageTypeFlagBitsEXT(static_cast<VkDebugUtilsMessageTypeFlagBitsEXT>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDebugUtilsMessageTypeFlagBitsEXT(static_cast<VkDebugUtilsMessageTypeFlagBitsEXT>(0)));
+ return ret;
+}
+
static inline const char* string_VkSamplerReductionModeEXT(VkSamplerReductionModeEXT input_value)
{
switch ((VkSamplerReductionModeEXT)input_value)
{
- case VK_SAMPLER_REDUCTION_MODE_MAX_EXT:
- return "VK_SAMPLER_REDUCTION_MODE_MAX_EXT";
- case VK_SAMPLER_REDUCTION_MODE_MIN_EXT:
- return "VK_SAMPLER_REDUCTION_MODE_MIN_EXT";
- case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT:
- return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT";
+ case VK_SAMPLER_REDUCTION_MODE_MAX:
+ return "VK_SAMPLER_REDUCTION_MODE_MAX";
+ case VK_SAMPLER_REDUCTION_MODE_MIN:
+ return "VK_SAMPLER_REDUCTION_MODE_MIN";
+ case VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE:
+ return "VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE";
default:
return "Unhandled VkSamplerReductionModeEXT";
}
@@ -3586,12 +5740,12 @@ static inline const char* string_VkBlendOverlapEXT(VkBlendOverlapEXT input_value
{
switch ((VkBlendOverlapEXT)input_value)
{
+ case VK_BLEND_OVERLAP_CONJOINT_EXT:
+ return "VK_BLEND_OVERLAP_CONJOINT_EXT";
case VK_BLEND_OVERLAP_DISJOINT_EXT:
return "VK_BLEND_OVERLAP_DISJOINT_EXT";
case VK_BLEND_OVERLAP_UNCORRELATED_EXT:
return "VK_BLEND_OVERLAP_UNCORRELATED_EXT";
- case VK_BLEND_OVERLAP_CONJOINT_EXT:
- return "VK_BLEND_OVERLAP_CONJOINT_EXT";
default:
return "Unhandled VkBlendOverlapEXT";
}
@@ -3601,14 +5755,14 @@ static inline const char* string_VkCoverageModulationModeNV(VkCoverageModulation
{
switch ((VkCoverageModulationModeNV)input_value)
{
- case VK_COVERAGE_MODULATION_MODE_RGBA_NV:
- return "VK_COVERAGE_MODULATION_MODE_RGBA_NV";
case VK_COVERAGE_MODULATION_MODE_ALPHA_NV:
return "VK_COVERAGE_MODULATION_MODE_ALPHA_NV";
- case VK_COVERAGE_MODULATION_MODE_RGB_NV:
- return "VK_COVERAGE_MODULATION_MODE_RGB_NV";
case VK_COVERAGE_MODULATION_MODE_NONE_NV:
return "VK_COVERAGE_MODULATION_MODE_NONE_NV";
+ case VK_COVERAGE_MODULATION_MODE_RGBA_NV:
+ return "VK_COVERAGE_MODULATION_MODE_RGBA_NV";
+ case VK_COVERAGE_MODULATION_MODE_RGB_NV:
+ return "VK_COVERAGE_MODULATION_MODE_RGB_NV";
default:
return "Unhandled VkCoverageModulationModeNV";
}
@@ -3629,27 +5783,259 @@ static inline const char* string_VkDescriptorBindingFlagBitsEXT(VkDescriptorBind
{
switch ((VkDescriptorBindingFlagBitsEXT)input_value)
{
- case VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT:
- return "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT";
- case VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT:
- return "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT";
- case VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT:
- return "VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT";
- case VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT:
- return "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT";
+ case VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT:
+ return "VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT";
+ case VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT:
+ return "VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT";
+ case VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT:
+ return "VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT";
+ case VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT:
+ return "VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT";
default:
return "Unhandled VkDescriptorBindingFlagBitsEXT";
}
}
+static inline std::string string_VkDescriptorBindingFlagsEXT(VkDescriptorBindingFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkDescriptorBindingFlagBitsEXT(static_cast<VkDescriptorBindingFlagBitsEXT>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkDescriptorBindingFlagBitsEXT(static_cast<VkDescriptorBindingFlagBitsEXT>(0)));
+ return ret;
+}
+
+static inline const char* string_VkShadingRatePaletteEntryNV(VkShadingRatePaletteEntryNV input_value)
+{
+ switch ((VkShadingRatePaletteEntryNV)input_value)
+ {
+ case VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV";
+ case VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV:
+ return "VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV";
+ default:
+ return "Unhandled VkShadingRatePaletteEntryNV";
+ }
+}
+
+static inline const char* string_VkCoarseSampleOrderTypeNV(VkCoarseSampleOrderTypeNV input_value)
+{
+ switch ((VkCoarseSampleOrderTypeNV)input_value)
+ {
+ case VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV:
+ return "VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV";
+ case VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV:
+ return "VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV";
+ case VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV:
+ return "VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV";
+ case VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV:
+ return "VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV";
+ default:
+ return "Unhandled VkCoarseSampleOrderTypeNV";
+ }
+}
+
+static inline const char* string_VkAccelerationStructureTypeNV(VkAccelerationStructureTypeNV input_value)
+{
+ switch ((VkAccelerationStructureTypeNV)input_value)
+ {
+ case VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV:
+ return "VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV";
+ case VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV:
+ return "VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV";
+ default:
+ return "Unhandled VkAccelerationStructureTypeNV";
+ }
+}
+
+static inline const char* string_VkRayTracingShaderGroupTypeNV(VkRayTracingShaderGroupTypeNV input_value)
+{
+ switch ((VkRayTracingShaderGroupTypeNV)input_value)
+ {
+ case VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV:
+ return "VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV";
+ case VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV:
+ return "VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV";
+ case VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV:
+ return "VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV";
+ default:
+ return "Unhandled VkRayTracingShaderGroupTypeNV";
+ }
+}
+
+static inline const char* string_VkGeometryTypeNV(VkGeometryTypeNV input_value)
+{
+ switch ((VkGeometryTypeNV)input_value)
+ {
+ case VK_GEOMETRY_TYPE_AABBS_NV:
+ return "VK_GEOMETRY_TYPE_AABBS_NV";
+ case VK_GEOMETRY_TYPE_TRIANGLES_NV:
+ return "VK_GEOMETRY_TYPE_TRIANGLES_NV";
+ default:
+ return "Unhandled VkGeometryTypeNV";
+ }
+}
+
+static inline const char* string_VkGeometryFlagBitsNV(VkGeometryFlagBitsNV input_value)
+{
+ switch ((VkGeometryFlagBitsNV)input_value)
+ {
+ case VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV:
+ return "VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV";
+ case VK_GEOMETRY_OPAQUE_BIT_NV:
+ return "VK_GEOMETRY_OPAQUE_BIT_NV";
+ default:
+ return "Unhandled VkGeometryFlagBitsNV";
+ }
+}
+
+static inline std::string string_VkGeometryFlagsNV(VkGeometryFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkGeometryFlagBitsNV(static_cast<VkGeometryFlagBitsNV>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkGeometryFlagBitsNV(static_cast<VkGeometryFlagBitsNV>(0)));
+ return ret;
+}
+
+static inline const char* string_VkGeometryInstanceFlagBitsNV(VkGeometryInstanceFlagBitsNV input_value)
+{
+ switch ((VkGeometryInstanceFlagBitsNV)input_value)
+ {
+ case VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV:
+ return "VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV";
+ case VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV:
+ return "VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV";
+ case VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV:
+ return "VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV";
+ case VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV:
+ return "VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV";
+ default:
+ return "Unhandled VkGeometryInstanceFlagBitsNV";
+ }
+}
+
+static inline std::string string_VkGeometryInstanceFlagsNV(VkGeometryInstanceFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkGeometryInstanceFlagBitsNV(static_cast<VkGeometryInstanceFlagBitsNV>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkGeometryInstanceFlagBitsNV(static_cast<VkGeometryInstanceFlagBitsNV>(0)));
+ return ret;
+}
+
+static inline const char* string_VkBuildAccelerationStructureFlagBitsNV(VkBuildAccelerationStructureFlagBitsNV input_value)
+{
+ switch ((VkBuildAccelerationStructureFlagBitsNV)input_value)
+ {
+ case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV";
+ case VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV";
+ case VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV";
+ case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV";
+ case VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV:
+ return "VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV";
+ default:
+ return "Unhandled VkBuildAccelerationStructureFlagBitsNV";
+ }
+}
+
+static inline std::string string_VkBuildAccelerationStructureFlagsNV(VkBuildAccelerationStructureFlagsNV input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkBuildAccelerationStructureFlagBitsNV(static_cast<VkBuildAccelerationStructureFlagBitsNV>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkBuildAccelerationStructureFlagBitsNV(static_cast<VkBuildAccelerationStructureFlagBitsNV>(0)));
+ return ret;
+}
+
+static inline const char* string_VkCopyAccelerationStructureModeNV(VkCopyAccelerationStructureModeNV input_value)
+{
+ switch ((VkCopyAccelerationStructureModeNV)input_value)
+ {
+ case VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV:
+ return "VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV";
+ case VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV:
+ return "VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV";
+ default:
+ return "Unhandled VkCopyAccelerationStructureModeNV";
+ }
+}
+
+static inline const char* string_VkAccelerationStructureMemoryRequirementsTypeNV(VkAccelerationStructureMemoryRequirementsTypeNV input_value)
+{
+ switch ((VkAccelerationStructureMemoryRequirementsTypeNV)input_value)
+ {
+ case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV:
+ return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV";
+ case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV:
+ return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV";
+ case VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV:
+ return "VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV";
+ default:
+ return "Unhandled VkAccelerationStructureMemoryRequirementsTypeNV";
+ }
+}
+
static inline const char* string_VkQueueGlobalPriorityEXT(VkQueueGlobalPriorityEXT input_value)
{
switch ((VkQueueGlobalPriorityEXT)input_value)
{
- case VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT:
- return "VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT";
case VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT:
return "VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT";
+ case VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT:
+ return "VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT";
case VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT:
return "VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT";
case VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT:
@@ -3659,6 +6045,312 @@ static inline const char* string_VkQueueGlobalPriorityEXT(VkQueueGlobalPriorityE
}
}
+static inline const char* string_VkTimeDomainEXT(VkTimeDomainEXT input_value)
+{
+ switch ((VkTimeDomainEXT)input_value)
+ {
+ case VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT:
+ return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT";
+ case VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT:
+ return "VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT";
+ case VK_TIME_DOMAIN_DEVICE_EXT:
+ return "VK_TIME_DOMAIN_DEVICE_EXT";
+ case VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT:
+ return "VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT";
+ default:
+ return "Unhandled VkTimeDomainEXT";
+ }
+}
+
+static inline const char* string_VkMemoryOverallocationBehaviorAMD(VkMemoryOverallocationBehaviorAMD input_value)
+{
+ switch ((VkMemoryOverallocationBehaviorAMD)input_value)
+ {
+ case VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD:
+ return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD";
+ case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD:
+ return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD";
+ case VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD:
+ return "VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD";
+ default:
+ return "Unhandled VkMemoryOverallocationBehaviorAMD";
+ }
+}
+
+static inline const char* string_VkPipelineCreationFeedbackFlagBitsEXT(VkPipelineCreationFeedbackFlagBitsEXT input_value)
+{
+ switch ((VkPipelineCreationFeedbackFlagBitsEXT)input_value)
+ {
+ case VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT:
+ return "VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT";
+ case VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT:
+ return "VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT";
+ case VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT:
+ return "VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT";
+ default:
+ return "Unhandled VkPipelineCreationFeedbackFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkPipelineCreationFeedbackFlagsEXT(VkPipelineCreationFeedbackFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkPipelineCreationFeedbackFlagBitsEXT(static_cast<VkPipelineCreationFeedbackFlagBitsEXT>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkPipelineCreationFeedbackFlagBitsEXT(static_cast<VkPipelineCreationFeedbackFlagBitsEXT>(0)));
+ return ret;
+}
+
+static inline const char* string_VkPerformanceConfigurationTypeINTEL(VkPerformanceConfigurationTypeINTEL input_value)
+{
+ switch ((VkPerformanceConfigurationTypeINTEL)input_value)
+ {
+ case VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL:
+ return "VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL";
+ default:
+ return "Unhandled VkPerformanceConfigurationTypeINTEL";
+ }
+}
+
+static inline const char* string_VkQueryPoolSamplingModeINTEL(VkQueryPoolSamplingModeINTEL input_value)
+{
+ switch ((VkQueryPoolSamplingModeINTEL)input_value)
+ {
+ case VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL:
+ return "VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL";
+ default:
+ return "Unhandled VkQueryPoolSamplingModeINTEL";
+ }
+}
+
+static inline const char* string_VkPerformanceOverrideTypeINTEL(VkPerformanceOverrideTypeINTEL input_value)
+{
+ switch ((VkPerformanceOverrideTypeINTEL)input_value)
+ {
+ case VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL:
+ return "VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL";
+ case VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL:
+ return "VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL";
+ default:
+ return "Unhandled VkPerformanceOverrideTypeINTEL";
+ }
+}
+
+static inline const char* string_VkPerformanceParameterTypeINTEL(VkPerformanceParameterTypeINTEL input_value)
+{
+ switch ((VkPerformanceParameterTypeINTEL)input_value)
+ {
+ case VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL:
+ return "VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL";
+ case VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL:
+ return "VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL";
+ default:
+ return "Unhandled VkPerformanceParameterTypeINTEL";
+ }
+}
+
+static inline const char* string_VkPerformanceValueTypeINTEL(VkPerformanceValueTypeINTEL input_value)
+{
+ switch ((VkPerformanceValueTypeINTEL)input_value)
+ {
+ case VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL:
+ return "VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL";
+ case VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL:
+ return "VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL";
+ case VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL:
+ return "VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL";
+ case VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL:
+ return "VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL";
+ case VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL:
+ return "VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL";
+ default:
+ return "Unhandled VkPerformanceValueTypeINTEL";
+ }
+}
+
+static inline const char* string_VkToolPurposeFlagBitsEXT(VkToolPurposeFlagBitsEXT input_value)
+{
+ switch ((VkToolPurposeFlagBitsEXT)input_value)
+ {
+ case VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT:
+ return "VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT";
+ case VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT:
+ return "VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT";
+ case VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT:
+ return "VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT";
+ case VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT:
+ return "VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT";
+ case VK_TOOL_PURPOSE_PROFILING_BIT_EXT:
+ return "VK_TOOL_PURPOSE_PROFILING_BIT_EXT";
+ case VK_TOOL_PURPOSE_TRACING_BIT_EXT:
+ return "VK_TOOL_PURPOSE_TRACING_BIT_EXT";
+ case VK_TOOL_PURPOSE_VALIDATION_BIT_EXT:
+ return "VK_TOOL_PURPOSE_VALIDATION_BIT_EXT";
+ default:
+ return "Unhandled VkToolPurposeFlagBitsEXT";
+ }
+}
+
+static inline std::string string_VkToolPurposeFlagsEXT(VkToolPurposeFlagsEXT input_value)
+{
+ std::string ret;
+ int index = 0;
+ while(input_value) {
+ if (input_value & 1) {
+ if( !ret.empty()) ret.append("|");
+ ret.append(string_VkToolPurposeFlagBitsEXT(static_cast<VkToolPurposeFlagBitsEXT>(1 << index)));
+ }
+ ++index;
+ input_value >>= 1;
+ }
+ if( ret.empty()) ret.append(string_VkToolPurposeFlagBitsEXT(static_cast<VkToolPurposeFlagBitsEXT>(0)));
+ return ret;
+}
+
+static inline const char* string_VkValidationFeatureEnableEXT(VkValidationFeatureEnableEXT input_value)
+{
+ switch ((VkValidationFeatureEnableEXT)input_value)
+ {
+ case VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT:
+ return "VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT";
+ case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT:
+ return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT";
+ case VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT:
+ return "VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT";
+ default:
+ return "Unhandled VkValidationFeatureEnableEXT";
+ }
+}
+
+static inline const char* string_VkValidationFeatureDisableEXT(VkValidationFeatureDisableEXT input_value)
+{
+ switch ((VkValidationFeatureDisableEXT)input_value)
+ {
+ case VK_VALIDATION_FEATURE_DISABLE_ALL_EXT:
+ return "VK_VALIDATION_FEATURE_DISABLE_ALL_EXT";
+ case VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT:
+ return "VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT";
+ case VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT:
+ return "VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT";
+ case VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT:
+ return "VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT";
+ case VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT:
+ return "VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT";
+ case VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT:
+ return "VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT";
+ case VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT:
+ return "VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT";
+ default:
+ return "Unhandled VkValidationFeatureDisableEXT";
+ }
+}
+
+static inline const char* string_VkComponentTypeNV(VkComponentTypeNV input_value)
+{
+ switch ((VkComponentTypeNV)input_value)
+ {
+ case VK_COMPONENT_TYPE_FLOAT16_NV:
+ return "VK_COMPONENT_TYPE_FLOAT16_NV";
+ case VK_COMPONENT_TYPE_FLOAT32_NV:
+ return "VK_COMPONENT_TYPE_FLOAT32_NV";
+ case VK_COMPONENT_TYPE_FLOAT64_NV:
+ return "VK_COMPONENT_TYPE_FLOAT64_NV";
+ case VK_COMPONENT_TYPE_SINT16_NV:
+ return "VK_COMPONENT_TYPE_SINT16_NV";
+ case VK_COMPONENT_TYPE_SINT32_NV:
+ return "VK_COMPONENT_TYPE_SINT32_NV";
+ case VK_COMPONENT_TYPE_SINT64_NV:
+ return "VK_COMPONENT_TYPE_SINT64_NV";
+ case VK_COMPONENT_TYPE_SINT8_NV:
+ return "VK_COMPONENT_TYPE_SINT8_NV";
+ case VK_COMPONENT_TYPE_UINT16_NV:
+ return "VK_COMPONENT_TYPE_UINT16_NV";
+ case VK_COMPONENT_TYPE_UINT32_NV:
+ return "VK_COMPONENT_TYPE_UINT32_NV";
+ case VK_COMPONENT_TYPE_UINT64_NV:
+ return "VK_COMPONENT_TYPE_UINT64_NV";
+ case VK_COMPONENT_TYPE_UINT8_NV:
+ return "VK_COMPONENT_TYPE_UINT8_NV";
+ default:
+ return "Unhandled VkComponentTypeNV";
+ }
+}
+
+static inline const char* string_VkScopeNV(VkScopeNV input_value)
+{
+ switch ((VkScopeNV)input_value)
+ {
+ case VK_SCOPE_DEVICE_NV:
+ return "VK_SCOPE_DEVICE_NV";
+ case VK_SCOPE_QUEUE_FAMILY_NV:
+ return "VK_SCOPE_QUEUE_FAMILY_NV";
+ case VK_SCOPE_SUBGROUP_NV:
+ return "VK_SCOPE_SUBGROUP_NV";
+ case VK_SCOPE_WORKGROUP_NV:
+ return "VK_SCOPE_WORKGROUP_NV";
+ default:
+ return "Unhandled VkScopeNV";
+ }
+}
+
+static inline const char* string_VkCoverageReductionModeNV(VkCoverageReductionModeNV input_value)
+{
+ switch ((VkCoverageReductionModeNV)input_value)
+ {
+ case VK_COVERAGE_REDUCTION_MODE_MERGE_NV:
+ return "VK_COVERAGE_REDUCTION_MODE_MERGE_NV";
+ case VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV:
+ return "VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV";
+ default:
+ return "Unhandled VkCoverageReductionModeNV";
+ }
+}
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+
+static inline const char* string_VkFullScreenExclusiveEXT(VkFullScreenExclusiveEXT input_value)
+{
+ switch ((VkFullScreenExclusiveEXT)input_value)
+ {
+ case VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT:
+ return "VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT";
+ case VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT:
+ return "VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT";
+ case VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT:
+ return "VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT";
+ case VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT:
+ return "VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT";
+ default:
+ return "Unhandled VkFullScreenExclusiveEXT";
+ }
+}
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+static inline const char* string_VkLineRasterizationModeEXT(VkLineRasterizationModeEXT input_value)
+{
+ switch ((VkLineRasterizationModeEXT)input_value)
+ {
+ case VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT:
+ return "VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT";
+ case VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT:
+ return "VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT";
+ case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT:
+ return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT";
+ case VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT:
+ return "VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT";
+ default:
+ return "Unhandled VkLineRasterizationModeEXT";
+ }
+}
+
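Taken together, these helpers are what logging and validation code calls when it needs readable enum and flag output. A sketch of a VK_EXT_debug_utils messenger callback using the helpers added above (the callback itself is illustrative and not part of this diff; it assumes <cstdio> and vulkan.h are included):

    static VKAPI_ATTR VkBool32 VKAPI_CALL DebugCallback(
            VkDebugUtilsMessageSeverityFlagBitsEXT severity,
            VkDebugUtilsMessageTypeFlagsEXT types,
            const VkDebugUtilsMessengerCallbackDataEXT *callback_data,
            void *user_data) {
        (void)user_data; // unused in this sketch
        // The ...FlagBits variant returns const char*, the ...Flags variant std::string.
        printf("[%s][%s] %s\n",
                string_VkDebugUtilsMessageSeverityFlagBitsEXT(severity),
                string_VkDebugUtilsMessageTypeFlagsEXT(types).c_str(),
                callback_data->pMessage);
        return VK_FALSE; // Never abort the call that triggered the message.
    }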
static inline const char * GetPhysDevFeatureString(uint32_t index) {
const char * IndexToPhysDevFeatureString[] = {
"robustBufferAccess",
diff --git a/thirdparty/vulkan/vk_mem_alloc.h b/thirdparty/vulkan/vk_mem_alloc.h
index 465864b363..0dfb66efc6 100644
--- a/thirdparty/vulkan/vk_mem_alloc.h
+++ b/thirdparty/vulkan/vk_mem_alloc.h
@@ -1,5 +1,5 @@
//
-// Copyright (c) 2017-2020 Advanced Micro Devices, Inc. All rights reserved.
+// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
@@ -40,31 +40,31 @@ Documentation of all members: vk_mem_alloc.h
- <b>User guide</b>
- \subpage quick_start
- - [Project setup](@ref quick_start_project_setup)
- - [Initialization](@ref quick_start_initialization)
- - [Resource allocation](@ref quick_start_resource_allocation)
+ - [Project setup](@ref quick_start_project_setup)
+ - [Initialization](@ref quick_start_initialization)
+ - [Resource allocation](@ref quick_start_resource_allocation)
- \subpage choosing_memory_type
- - [Usage](@ref choosing_memory_type_usage)
- - [Required and preferred flags](@ref choosing_memory_type_required_preferred_flags)
- - [Explicit memory types](@ref choosing_memory_type_explicit_memory_types)
- - [Custom memory pools](@ref choosing_memory_type_custom_memory_pools)
- - [Dedicated allocations](@ref choosing_memory_type_dedicated_allocations)
+ - [Usage](@ref choosing_memory_type_usage)
+ - [Required and preferred flags](@ref choosing_memory_type_required_preferred_flags)
+ - [Explicit memory types](@ref choosing_memory_type_explicit_memory_types)
+ - [Custom memory pools](@ref choosing_memory_type_custom_memory_pools)
+ - [Dedicated allocations](@ref choosing_memory_type_dedicated_allocations)
- \subpage memory_mapping
- - [Mapping functions](@ref memory_mapping_mapping_functions)
- - [Persistently mapped memory](@ref memory_mapping_persistently_mapped_memory)
- - [Cache flush and invalidate](@ref memory_mapping_cache_control)
- - [Finding out if memory is mappable](@ref memory_mapping_finding_if_memory_mappable)
+ - [Mapping functions](@ref memory_mapping_mapping_functions)
+ - [Persistently mapped memory](@ref memory_mapping_persistently_mapped_memory)
+ - [Cache flush and invalidate](@ref memory_mapping_cache_control)
+ - [Finding out if memory is mappable](@ref memory_mapping_finding_if_memory_mappable)
- \subpage staying_within_budget
- - [Querying for budget](@ref staying_within_budget_querying_for_budget)
- - [Controlling memory usage](@ref staying_within_budget_controlling_memory_usage)
+ - [Querying for budget](@ref staying_within_budget_querying_for_budget)
+ - [Controlling memory usage](@ref staying_within_budget_controlling_memory_usage)
- \subpage custom_memory_pools
- - [Choosing memory type index](@ref custom_memory_pools_MemTypeIndex)
- - [Linear allocation algorithm](@ref linear_algorithm)
- - [Free-at-once](@ref linear_algorithm_free_at_once)
- - [Stack](@ref linear_algorithm_stack)
- - [Double stack](@ref linear_algorithm_double_stack)
- - [Ring buffer](@ref linear_algorithm_ring_buffer)
- - [Buddy allocation algorithm](@ref buddy_algorithm)
+ - [Choosing memory type index](@ref custom_memory_pools_MemTypeIndex)
+ - [Linear allocation algorithm](@ref linear_algorithm)
+ - [Free-at-once](@ref linear_algorithm_free_at_once)
+ - [Stack](@ref linear_algorithm_stack)
+ - [Double stack](@ref linear_algorithm_double_stack)
+ - [Ring buffer](@ref linear_algorithm_ring_buffer)
+ - [Buddy allocation algorithm](@ref buddy_algorithm)
- \subpage defragmentation
- [Defragmenting CPU memory](@ref defragmentation_cpu)
- [Defragmenting GPU memory](@ref defragmentation_gpu)
@@ -72,15 +72,15 @@ Documentation of all members: vk_mem_alloc.h
- [Writing custom allocation algorithm](@ref defragmentation_custom_algorithm)
- \subpage lost_allocations
- \subpage statistics
- - [Numeric statistics](@ref statistics_numeric_statistics)
- - [JSON dump](@ref statistics_json_dump)
+ - [Numeric statistics](@ref statistics_numeric_statistics)
+ - [JSON dump](@ref statistics_json_dump)
- \subpage allocation_annotation
- - [Allocation user data](@ref allocation_user_data)
- - [Allocation names](@ref allocation_names)
+ - [Allocation user data](@ref allocation_user_data)
+ - [Allocation names](@ref allocation_names)
- \subpage debugging_memory_usage
- - [Memory initialization](@ref debugging_memory_usage_initialization)
- - [Margins](@ref debugging_memory_usage_margins)
- - [Corruption detection](@ref debugging_memory_usage_corruption_detection)
+ - [Memory initialization](@ref debugging_memory_usage_initialization)
+ - [Margins](@ref debugging_memory_usage_margins)
+ - [Corruption detection](@ref debugging_memory_usage_corruption_detection)
- \subpage record_and_replay
- \subpage usage_patterns
- [Common mistakes](@ref usage_patterns_common_mistakes)
@@ -92,7 +92,6 @@ Documentation of all members: vk_mem_alloc.h
- [Device memory allocation callbacks](@ref allocation_callbacks)
- [Device heap memory limit](@ref heap_memory_limit)
- \subpage vk_khr_dedicated_allocation
- - \subpage vk_amd_device_coherent_memory
- \subpage general_considerations
- [Thread safety](@ref general_considerations_thread_safety)
- [Validation layer warnings](@ref general_considerations_validation_layer_warnings)
@@ -357,7 +356,7 @@ Example:
struct ConstantBuffer
{
- ...
+ ...
};
ConstantBuffer constantBufferData;
@@ -481,16 +480,16 @@ VkMemoryPropertyFlags memFlags;
vmaGetMemoryTypeProperties(allocator, allocInfo.memoryType, &memFlags);
if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
{
- // Allocation ended up in mappable memory. You can map it and access it directly.
- void* mappedData;
- vmaMapMemory(allocator, alloc, &mappedData);
- memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
- vmaUnmapMemory(allocator, alloc);
+ // Allocation ended up in mappable memory. You can map it and access it directly.
+ void* mappedData;
+ vmaMapMemory(allocator, alloc, &mappedData);
+ memcpy(mappedData, &constantBufferData, sizeof(constantBufferData));
+ vmaUnmapMemory(allocator, alloc);
}
else
{
- // Allocation ended up in non-mappable memory.
- // You need to create CPU-side buffer in VMA_MEMORY_USAGE_CPU_ONLY and make a transfer.
+ // Allocation ended up in non-mappable memory.
+ // You need to create CPU-side buffer in VMA_MEMORY_USAGE_CPU_ONLY and make a transfer.
}
\endcode
@@ -516,14 +515,14 @@ vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allo
if(allocInfo.pUserData != nullptr)
{
- // Allocation ended up in mappable memory.
- // It's persistently mapped. You can access it directly.
- memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
+ // Allocation ended up in mappable memory.
+ // It's persistently mapped. You can access it directly.
+ memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
}
else
{
- // Allocation ended up in non-mappable memory.
- // You need to create CPU-side buffer in VMA_MEMORY_USAGE_CPU_ONLY and make a transfer.
+ // Allocation ended up in non-mappable memory.
+ // You need to create CPU-side buffer in VMA_MEMORY_USAGE_CPU_ONLY and make a transfer.
}
\endcode
@@ -881,22 +880,22 @@ vmaDefragmentationEnd(allocator, defragCtx);
for(uint32_t i = 0; i < allocCount; ++i)
{
- if(allocationsChanged[i])
- {
- // Destroy buffer that is immutably bound to memory region which is no longer valid.
- vkDestroyBuffer(device, buffers[i], nullptr);
-
- // Create new buffer with same parameters.
- VkBufferCreateInfo bufferInfo = ...;
- vkCreateBuffer(device, &bufferInfo, nullptr, &buffers[i]);
-
- // You can make dummy call to vkGetBufferMemoryRequirements here to silence validation layer warning.
-
- // Bind new buffer to new memory region. Data contained in it is already moved.
- VmaAllocationInfo allocInfo;
- vmaGetAllocationInfo(allocator, allocations[i], &allocInfo);
- vmaBindBufferMemory(allocator, allocations[i], buffers[i]);
- }
+ if(allocationsChanged[i])
+ {
+ // Destroy buffer that is immutably bound to memory region which is no longer valid.
+ vkDestroyBuffer(device, buffers[i], nullptr);
+
+ // Create new buffer with same parameters.
+ VkBufferCreateInfo bufferInfo = ...;
+ vkCreateBuffer(device, &bufferInfo, nullptr, &buffers[i]);
+
+ // You can make dummy call to vkGetBufferMemoryRequirements here to silence validation layer warning.
+
+ // Bind new buffer to new memory region. Data contained in it is already moved.
+ VmaAllocationInfo allocInfo;
+ vmaGetAllocationInfo(allocator, allocations[i], &allocInfo);
+ vmaBindBufferMemory(allocator, allocations[i], buffers[i]);
+ }
}
\endcode
@@ -959,22 +958,22 @@ vmaDefragmentationEnd(allocator, defragCtx);
for(uint32_t i = 0; i < allocCount; ++i)
{
- if(allocationsChanged[i])
- {
- // Destroy buffer that is immutably bound to memory region which is no longer valid.
- vkDestroyBuffer(device, buffers[i], nullptr);
-
- // Create new buffer with same parameters.
- VkBufferCreateInfo bufferInfo = ...;
- vkCreateBuffer(device, &bufferInfo, nullptr, &buffers[i]);
-
- // You can make dummy call to vkGetBufferMemoryRequirements here to silence validation layer warning.
-
- // Bind new buffer to new memory region. Data contained in it is already moved.
- VmaAllocationInfo allocInfo;
- vmaGetAllocationInfo(allocator, allocations[i], &allocInfo);
- vmaBindBufferMemory(allocator, allocations[i], buffers[i]);
- }
+ if(allocationsChanged[i])
+ {
+ // Destroy buffer that is immutably bound to memory region which is no longer valid.
+ vkDestroyBuffer(device, buffers[i], nullptr);
+
+ // Create new buffer with same parameters.
+ VkBufferCreateInfo bufferInfo = ...;
+ vkCreateBuffer(device, &bufferInfo, nullptr, &buffers[i]);
+
+ // You can make dummy call to vkGetBufferMemoryRequirements here to silence validation layer warning.
+
+ // Bind new buffer to new memory region. Data contained in it is already moved.
+ VmaAllocationInfo allocInfo;
+ vmaGetAllocationInfo(allocator, allocations[i], &allocInfo);
+ vmaBindBufferMemory(allocator, allocations[i], buffers[i]);
+ }
}
\endcode
@@ -1088,40 +1087,40 @@ Example code:
\code
struct MyBuffer
{
- VkBuffer m_Buf = nullptr;
- VmaAllocation m_Alloc = nullptr;
+ VkBuffer m_Buf = nullptr;
+ VmaAllocation m_Alloc = nullptr;
- // Called when the buffer is really needed in the current frame.
- void EnsureBuffer();
+ // Called when the buffer is really needed in the current frame.
+ void EnsureBuffer();
};
void MyBuffer::EnsureBuffer()
{
- // Buffer has been created.
- if(m_Buf != VK_NULL_HANDLE)
- {
- // Check if its allocation is not lost + mark it as used in current frame.
- if(vmaTouchAllocation(allocator, m_Alloc))
- {
- // It's all OK - safe to use m_Buf.
- return;
- }
- }
+ // Buffer has been created.
+ if(m_Buf != VK_NULL_HANDLE)
+ {
+ // Check if its allocation is not lost + mark it as used in current frame.
+ if(vmaTouchAllocation(allocator, m_Alloc))
+ {
+ // It's all OK - safe to use m_Buf.
+ return;
+ }
+ }
- // Buffer not yet exists or lost - destroy and recreate it.
+    // Buffer doesn't exist yet or is lost - destroy and recreate it.
- vmaDestroyBuffer(allocator, m_Buf, m_Alloc);
+ vmaDestroyBuffer(allocator, m_Buf, m_Alloc);
- VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
- bufCreateInfo.size = 1024;
- bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+ VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+ bufCreateInfo.size = 1024;
+ bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
- VmaAllocationCreateInfo allocCreateInfo = {};
- allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
- allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
- VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
+ VmaAllocationCreateInfo allocCreateInfo = {};
+ allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
+ allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
+ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
- vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &m_Buf, &m_Alloc, nullptr);
+ vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &m_Buf, &m_Alloc, nullptr);
}
\endcode
@@ -1269,9 +1268,6 @@ printf("Image name: %s\n", imageName);
That string is also printed in JSON report created by vmaBuildStatsString().
-\note Passing string name to VMA allocation doesn't automatically set it to the Vulkan buffer or image created with it.
-You must do it manually using an extension like VK_EXT_debug_utils, which is independent of this library.
-
\page debugging_memory_usage Debugging incorrect memory usage
@@ -1405,7 +1401,7 @@ Its project is generated by Premake.
Command line syntax is printed when the program is launched without parameters.
Basic usage:
- VmaReplay.exe MyRecording.csv
+ VmaReplay.exe MyRecording.csv
<b>Documentation of file format</b> can be found in file: "docs/Recording file format.md".
It's a human-readable text file in CSV format (Comma Separated Values).
@@ -1665,7 +1661,7 @@ buffer using vmaCreateBuffer() or image using vmaCreateImage().
When using the extension together with Vulkan Validation Layer, you will receive
warnings like this:
- vkBindBufferMemory(): Binding memory to buffer 0x33 but vkGetBufferMemoryRequirements() has not been called on that buffer.
+ vkBindBufferMemory(): Binding memory to buffer 0x33 but vkGetBufferMemoryRequirements() has not been called on that buffer.
It is OK, you should just ignore it. It happens because you use function
`vkGetBufferMemoryRequirements2KHR()` instead of standard
@@ -1674,68 +1670,11 @@ unaware of it.
To learn more about this extension, see:
-- [VK_KHR_dedicated_allocation in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap44.html#VK_KHR_dedicated_allocation)
+- [VK_KHR_dedicated_allocation in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.0-extensions/html/vkspec.html#VK_KHR_dedicated_allocation)
- [VK_KHR_dedicated_allocation unofficial manual](http://asawicki.info/articles/VK_KHR_dedicated_allocation.php5)
-\page vk_amd_device_coherent_memory VK_AMD_device_coherent_memory
-
-VK_AMD_device_coherent_memory is a device extension that enables access to
-additional memory types with `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and
-`VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flag. It is useful mostly for
-allocation of buffers intended for writing "breadcrumb markers" in between passes
-or draw calls, which in turn are useful for debugging GPU crash/hang/TDR cases.
-
-When the extension is available but has not been enabled, Vulkan physical device
-still exposes those memory types, but their usage is forbidden. VMA automatically
-takes care of that - it returns `VK_ERROR_FEATURE_NOT_PRESENT` when an attempt
-to allocate memory of such type is made.
-
-If you want to use this extension in connection with VMA, follow these steps:
-
-\section vk_amd_device_coherent_memory_initialization Initialization
-
-1) Call `vkEnumerateDeviceExtensionProperties` for the physical device.
-Check if the extension is supported - if returned array of `VkExtensionProperties` contains "VK_AMD_device_coherent_memory".
-
-2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of old `vkGetPhysicalDeviceFeatures`.
-Attach additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to `VkPhysicalDeviceFeatures2::pNext` to be returned.
-Check if the device feature is really supported - check if `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true.
-
-3) While creating device with `vkCreateDevice`, enable this extension - add "VK_AMD_device_coherent_memory"
-to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`.
-
-4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`.
-Fill in `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`.
-Enable this device feature - attach additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to
-`VkPhysicalDeviceFeatures2::pNext` and set its member `deviceCoherentMemory` to `VK_TRUE`.
-
-5) While creating #VmaAllocator with vmaCreateAllocator() inform VMA that you
-have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT
-to VmaAllocatorCreateInfo::flags.
-
-\section vk_amd_device_coherent_memory_usage Usage
-
-After following steps described above, you can create VMA allocations and custom pools
-out of the special `DEVICE_COHERENT` and `DEVICE_UNCACHED` memory types on eligible
-devices. There are multiple ways to do it, for example:
-
-- You can request or prefer to allocate out of such memory types by adding
- `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` to VmaAllocationCreateInfo::requiredFlags
- or VmaAllocationCreateInfo::preferredFlags. Those flags can be freely mixed with
- other ways of \ref choosing_memory_type, like setting VmaAllocationCreateInfo::usage.
-- If you manually found memory type index to use for this purpose, force allocation
- from this specific index by setting VmaAllocationCreateInfo::memoryTypeBits `= 1u << index`.
-
-\section vk_amd_device_coherent_memory_more_information More information
-
-To learn more about this extension, see [VK_AMD_device_coherent_memory in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap44.html#VK_AMD_device_coherent_memory)
-
-Example use of this extension can be found in the code of the sample and test suite
-accompanying this library.
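The removed initialization steps 1)-5) above condense into a short sketch (error handling and step 1's extension enumeration omitted; `physicalDevice` is assumed to be chosen already):

    VkPhysicalDeviceCoherentMemoryFeaturesAMD coherentFeatures = {
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD };
    VkPhysicalDeviceFeatures2 features2 = {
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
    features2.pNext = &coherentFeatures;
    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);           // step 2

    if (coherentFeatures.deviceCoherentMemory) {
        const char* extensions[] = { "VK_AMD_device_coherent_memory" }; // step 3
        VkDeviceCreateInfo deviceInfo = { VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
        deviceInfo.pNext = &features2;      // step 4: leave pEnabledFeatures null
        deviceInfo.enabledExtensionCount = 1;
        deviceInfo.ppEnabledExtensionNames = extensions;
        // ... fill queue create infos, call vkCreateDevice, then for step 5:
        // allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
    }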
-
-
\page general_considerations General considerations
\section general_considerations_thread_safety Thread safety
@@ -1762,14 +1701,14 @@ to just ignore them.
- *vkBindBufferMemory(): Binding memory to buffer 0xeb8e4 but vkGetBufferMemoryRequirements() has not been called on that buffer.*
- It happens when VK_KHR_dedicated_allocation extension is enabled.
- `vkGetBufferMemoryRequirements2KHR` function is used instead, while validation layer seems to be unaware of it.
+ `vkGetBufferMemoryRequirements2KHR` function is used instead, while validation layer seems to be unaware of it.
- *Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.*
- It happens when you map a buffer or image, because the library maps entire
- `VkDeviceMemory` block, where different types of images and buffers may end
- up together, especially on GPUs with unified memory like Intel.
+ `VkDeviceMemory` block, where different types of images and buffers may end
+ up together, especially on GPUs with unified memory like Intel.
- *Non-linear image 0xebc91 is aliased with linear buffer 0xeb8e4 which may indicate a bug.*
- It happens when you use lost allocations, and a new image or buffer is
- created in place of an existing object that bacame lost.
+    created in place of an existing object that became lost.
- It may happen also when you use [defragmentation](@ref defragmentation).
\section general_considerations_allocation_algorithm Allocation algorithm
@@ -1824,54 +1763,54 @@ Define this macro to 0/1 to disable/enable support for recording functionality,
available through VmaAllocatorCreateInfo::pRecordSettings.
*/
#ifndef VMA_RECORDING_ENABLED
- #define VMA_RECORDING_ENABLED 0
+ #define VMA_RECORDING_ENABLED 0
#endif
#ifndef NOMINMAX
- #define NOMINMAX // For windows.h
+ #define NOMINMAX // For windows.h
#endif
#ifndef VULKAN_H_
- #include <vulkan/vulkan.h>
+ #include <vulkan/vulkan.h>
#endif
#if VMA_RECORDING_ENABLED
- #include <windows.h>
+ #include <windows.h>
#endif
// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
// where AAA = major, BBB = minor, CCC = patch.
// If you want to use version > 1.0, it still needs to be enabled via VmaAllocatorCreateInfo::vulkanApiVersion.
#if !defined(VMA_VULKAN_VERSION)
- #if defined(VK_VERSION_1_1)
- #define VMA_VULKAN_VERSION 1001000
- #else
- #define VMA_VULKAN_VERSION 1000000
- #endif
+ #if defined(VK_VERSION_1_1)
+ #define VMA_VULKAN_VERSION 1001000
+ #else
+ #define VMA_VULKAN_VERSION 1000000
+ #endif
#endif
#if !defined(VMA_DEDICATED_ALLOCATION)
- #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
- #define VMA_DEDICATED_ALLOCATION 1
- #else
- #define VMA_DEDICATED_ALLOCATION 0
- #endif
+ #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
+ #define VMA_DEDICATED_ALLOCATION 1
+ #else
+ #define VMA_DEDICATED_ALLOCATION 0
+ #endif
#endif
#if !defined(VMA_BIND_MEMORY2)
- #if VK_KHR_bind_memory2
- #define VMA_BIND_MEMORY2 1
- #else
- #define VMA_BIND_MEMORY2 0
- #endif
+ #if VK_KHR_bind_memory2
+ #define VMA_BIND_MEMORY2 1
+ #else
+ #define VMA_BIND_MEMORY2 0
+ #endif
#endif
#if !defined(VMA_MEMORY_BUDGET)
- #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
- #define VMA_MEMORY_BUDGET 1
- #else
- #define VMA_MEMORY_BUDGET 0
- #endif
+ #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
+ #define VMA_MEMORY_BUDGET 1
+ #else
+ #define VMA_MEMORY_BUDGET 0
+ #endif
#endif
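These configuration blocks only take effect if the corresponding macro is defined before this header is included. A sketch of a typical override (the values shown are illustrative choices, not defaults mandated by VMA):

    // In exactly one translation unit:
    #define VMA_RECORDING_ENABLED 0     // keep recording support compiled out
    #define VMA_DEDICATED_ALLOCATION 1  // we enabled VK_KHR_dedicated_allocation ourselves
    #define VMA_IMPLEMENTATION          // emit the implementation here
    #include "vk_mem_alloc.h"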
// Define these macros to decorate all public functions with additional code,
@@ -1880,10 +1819,10 @@ available through VmaAllocatorCreateInfo::pRecordSettings.
// #define VMA_CALL_PRE __declspec(dllexport)
// #define VMA_CALL_POST __cdecl
#ifndef VMA_CALL_PRE
- #define VMA_CALL_PRE
+ #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
- #define VMA_CALL_POST
+ #define VMA_CALL_POST
#endif
/** \struct VmaAllocator
@@ -1899,16 +1838,16 @@ VK_DEFINE_HANDLE(VmaAllocator)
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
- VmaAllocator allocator,
- uint32_t memoryType,
- VkDeviceMemory memory,
- VkDeviceSize size);
+ VmaAllocator allocator,
+ uint32_t memoryType,
+ VkDeviceMemory memory,
+ VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
- VmaAllocator allocator,
- uint32_t memoryType,
- VkDeviceMemory memory,
- VkDeviceSize size);
+ VmaAllocator allocator,
+ uint32_t memoryType,
+ VkDeviceMemory memory,
+ VkDeviceSize size);
/** \brief Set of callbacks that the library will call for `vkAllocateMemory` and `vkFreeMemory`.
@@ -1918,91 +1857,73 @@ allocations or total amount of memory allocated in Vulkan.
Used in VmaAllocatorCreateInfo::pDeviceMemoryCallbacks.
*/
typedef struct VmaDeviceMemoryCallbacks {
- /// Optional, can be null.
- PFN_vmaAllocateDeviceMemoryFunction pfnAllocate;
- /// Optional, can be null.
- PFN_vmaFreeDeviceMemoryFunction pfnFree;
+ /// Optional, can be null.
+ PFN_vmaAllocateDeviceMemoryFunction pfnAllocate;
+ /// Optional, can be null.
+ PFN_vmaFreeDeviceMemoryFunction pfnFree;
} VmaDeviceMemoryCallbacks;
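
As a minimal sketch, informative logging callbacks matching these typedefs could look like this (function names are illustrative only):

    static void VKAPI_PTR MyAllocateCallback(VmaAllocator allocator, uint32_t memoryType,
        VkDeviceMemory memory, VkDeviceSize size)
    {
        printf("vkAllocateMemory: type %u, %llu bytes\n", memoryType, (unsigned long long)size);
    }
    static void VKAPI_PTR MyFreeCallback(VmaAllocator allocator, uint32_t memoryType,
        VkDeviceMemory memory, VkDeviceSize size)
    {
        printf("vkFreeMemory: type %u, %llu bytes\n", memoryType, (unsigned long long)size);
    }

    VmaDeviceMemoryCallbacks deviceMemoryCallbacks = {};
    deviceMemoryCallbacks.pfnAllocate = MyAllocateCallback;
    deviceMemoryCallbacks.pfnFree = MyFreeCallback;
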
/// Flags for created #VmaAllocator.
typedef enum VmaAllocatorCreateFlagBits {
- /** \brief Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you.
-
- Using this flag may increase performance because internal mutexes are not used.
- */
- VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001,
- /** \brief Enables usage of VK_KHR_dedicated_allocation extension.
-
- The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`.
- When it's `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1.
-
- Using this extenion will automatically allocate dedicated blocks of memory for
- some buffers and images instead of suballocating place for them out of bigger
- memory blocks (as if you explicitly used #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
- flag) when it is recommended by the driver. It may improve performance on some
- GPUs.
-
- You may set this flag only if you found out that following device extensions are
- supported, you enabled them while creating Vulkan device passed as
- VmaAllocatorCreateInfo::device, and you want them to be used internally by this
- library:
-
- - VK_KHR_get_memory_requirements2 (device extension)
- - VK_KHR_dedicated_allocation (device extension)
-
- When this flag is set, you can experience following warnings reported by Vulkan
- validation layer. You can ignore them.
-
- > vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer.
- */
- VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002,
- /**
- Enables usage of VK_KHR_bind_memory2 extension.
-
- The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`.
- When it's `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1.
-
- You may set this flag only if you found out that this device extension is supported,
- you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device,
- and you want it to be used internally by this library.
-
- The extension provides functions `vkBindBufferMemory2KHR` and `vkBindImageMemory2KHR`,
- which allow to pass a chain of `pNext` structures while binding.
- This flag is required if you use `pNext` parameter in vmaBindBufferMemory2() or vmaBindImageMemory2().
- */
- VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT = 0x00000004,
- /**
- Enables usage of VK_EXT_memory_budget extension.
-
- You may set this flag only if you found out that this device extension is supported,
- you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device,
- and you want it to be used internally by this library, along with another instance extension
- VK_KHR_get_physical_device_properties2, which is required by it (or Vulkan 1.1, where this extension is promoted).
-
- The extension provides query for current memory usage and budget, which will probably
- be more accurate than an estimation used by the library otherwise.
- */
- VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT = 0x00000008,
- /**
- Enabled usage of VK_AMD_device_coherent_memory extension.
-
- You may set this flag only if you:
-
- - found out that this device extension is supported and enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device,
- - checked that `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true and set it while creating the Vulkan device,
- - want it to be used internally by this library.
-
- The extension and accompanying device feature provide access to memory types with
- `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flags.
- They are useful mostly for writing breadcrumb markers - a common method for debugging GPU crash/hang/TDR.
-
- When the extension is not enabled, such memory types are still enumerated, but their usage is illegal.
- To protect from this error, if you don't create the allocator with this flag, it will refuse to allocate any memory or create a custom pool in such memory type,
- returning `VK_ERROR_FEATURE_NOT_PRESENT`.
- */
- VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT = 0x00000010,
-
- VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+ /** \brief Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you.
+
+ Using this flag may increase performance because internal mutexes are not used.
+ */
+ VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001,
+ /** \brief Enables usage of VK_KHR_dedicated_allocation extension.
+
+ The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`.
+ When it's `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1.
+
+ Using this extension will automatically allocate dedicated blocks of memory for
+ some buffers and images instead of suballocating space for them out of bigger
+ memory blocks (as if you explicitly used #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT
+ flag) when it is recommended by the driver. It may improve performance on some
+ GPUs.
+
+ You may set this flag only if you found out that the following device extensions are
+ supported, you enabled them while creating the Vulkan device passed as
+ VmaAllocatorCreateInfo::device, and you want them to be used internally by this
+ library:
+
+ - VK_KHR_get_memory_requirements2 (device extension)
+ - VK_KHR_dedicated_allocation (device extension)
+
+ When this flag is set, you can experience the following warnings reported by the
+ Vulkan validation layer. You can ignore them.
+
+ > vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer.
+ */
+ VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002,
+ /**
+ Enables usage of VK_KHR_bind_memory2 extension.
+
+ The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`.
+ When it's `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1.
+
+ You may set this flag only if you found out that this device extension is supported,
+ you enabled it while creating the Vulkan device passed as VmaAllocatorCreateInfo::device,
+ and you want it to be used internally by this library.
+
+ The extension provides functions `vkBindBufferMemory2KHR` and `vkBindImageMemory2KHR`,
+ which allow passing a chain of `pNext` structures while binding.
+ This flag is required if you use `pNext` parameter in vmaBindBufferMemory2() or vmaBindImageMemory2().
+ */
+ VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT = 0x00000004,
+ /**
+ Enables usage of VK_EXT_memory_budget extension.
+
+ You may set this flag only if you found out that this device extension is supported,
+ you enabled it while creating the Vulkan device passed as VmaAllocatorCreateInfo::device,
+ and you want it to be used internally by this library, along with another instance extension
+ VK_KHR_get_physical_device_properties2, which is required by it (or Vulkan 1.1, where this extension is promoted).
+
+ The extension provides a query for current memory usage and budget, which will probably
+ be more accurate than the estimation used by the library otherwise.
+ */
+ VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT = 0x00000008,
+
+ VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaAllocatorCreateFlagBits;
typedef VkFlags VmaAllocatorCreateFlags;
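
A sketch of typical flag selection, assuming `dedicatedAllocationExtensionsEnabled` reflects the application's own extension checks:

    VmaAllocatorCreateFlags allocatorFlags = 0;
    if (dedicatedAllocationExtensionsEnabled) // both device extensions found and enabled
        allocatorFlags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
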
@@ -2011,183 +1932,183 @@ typedef VkFlags VmaAllocatorCreateFlags;
Used in VmaAllocatorCreateInfo::pVulkanFunctions.
*/
typedef struct VmaVulkanFunctions {
- PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
- PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
- PFN_vkAllocateMemory vkAllocateMemory;
- PFN_vkFreeMemory vkFreeMemory;
- PFN_vkMapMemory vkMapMemory;
- PFN_vkUnmapMemory vkUnmapMemory;
- PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
- PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
- PFN_vkBindBufferMemory vkBindBufferMemory;
- PFN_vkBindImageMemory vkBindImageMemory;
- PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
- PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
- PFN_vkCreateBuffer vkCreateBuffer;
- PFN_vkDestroyBuffer vkDestroyBuffer;
- PFN_vkCreateImage vkCreateImage;
- PFN_vkDestroyImage vkDestroyImage;
- PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
+ PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
+ PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
+ PFN_vkAllocateMemory vkAllocateMemory;
+ PFN_vkFreeMemory vkFreeMemory;
+ PFN_vkMapMemory vkMapMemory;
+ PFN_vkUnmapMemory vkUnmapMemory;
+ PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
+ PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
+ PFN_vkBindBufferMemory vkBindBufferMemory;
+ PFN_vkBindImageMemory vkBindImageMemory;
+ PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
+ PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
+ PFN_vkCreateBuffer vkCreateBuffer;
+ PFN_vkDestroyBuffer vkDestroyBuffer;
+ PFN_vkCreateImage vkCreateImage;
+ PFN_vkDestroyImage vkDestroyImage;
+ PFN_vkCmdCopyBuffer vkCmdCopyBuffer;
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
- PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
+ PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
+ PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
- PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
- PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
+ PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
+ PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
- PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
+ PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
} VmaVulkanFunctions;
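
A sketch of filling this structure dynamically, assuming `instance` and `device` are the application's live Vulkan handles (only a few members shown; the rest follow the same pattern):

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)
        vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties");
    vulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)
        vkGetDeviceProcAddr(device, "vkAllocateMemory");
    vulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)
        vkGetDeviceProcAddr(device, "vkFreeMemory");
    // ...and so on for the remaining members.
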
/// Flags to be used in VmaRecordSettings::flags.
typedef enum VmaRecordFlagBits {
- /** \brief Enables flush after recording every function call.
-
- Enable it if you expect your application to crash, which may leave recording file truncated.
- It may degrade performance though.
- */
- VMA_RECORD_FLUSH_AFTER_CALL_BIT = 0x00000001,
-
- VMA_RECORD_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+ /** \brief Enables flush after recording every function call.
+
+ Enable it if you expect your application to crash, which may leave recording file truncated.
+ It may degrade performance though.
+ */
+ VMA_RECORD_FLUSH_AFTER_CALL_BIT = 0x00000001,
+
+ VMA_RECORD_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaRecordFlagBits;
typedef VkFlags VmaRecordFlags;
/// Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSettings.
typedef struct VmaRecordSettings
{
- /// Flags for recording. Use #VmaRecordFlagBits enum.
- VmaRecordFlags flags;
- /** \brief Path to the file that should be written by the recording.
+ /// Flags for recording. Use #VmaRecordFlagBits enum.
+ VmaRecordFlags flags;
+ /** \brief Path to the file that should be written by the recording.
- Suggested extension: "csv".
- If the file already exists, it will be overwritten.
- It will be opened for the whole time #VmaAllocator object is alive.
- If opening this file fails, creation of the whole allocator object fails.
- */
- const char* pFilePath;
+ Suggested extension: "csv".
+ If the file already exists, it will be overwritten.
+ It will be opened for the whole time #VmaAllocator object is alive.
+ If opening this file fails, creation of the whole allocator object fails.
+ */
+ const char* pFilePath;
} VmaRecordSettings;
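
A sketch of enabling recording (the file path is illustrative; this requires the library to be built with `VMA_RECORDING_ENABLED` set to 1):

    VmaRecordSettings recordSettings = {};
    recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT; // survive crashes, at some cost
    recordSettings.pFilePath = "vma_calls.csv";
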
/// Description of an Allocator to be created.
typedef struct VmaAllocatorCreateInfo
{
- /// Flags for created allocator. Use #VmaAllocatorCreateFlagBits enum.
- VmaAllocatorCreateFlags flags;
- /// Vulkan physical device.
- /** It must be valid throughout whole lifetime of created allocator. */
- VkPhysicalDevice physicalDevice;
- /// Vulkan device.
- /** It must be valid throughout whole lifetime of created allocator. */
- VkDevice device;
- /// Preferred size of a single `VkDeviceMemory` block to be allocated from large heaps > 1 GiB. Optional.
- /** Set to 0 to use default, which is currently 256 MiB. */
- VkDeviceSize preferredLargeHeapBlockSize;
- /// Custom CPU memory allocation callbacks. Optional.
- /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. */
- const VkAllocationCallbacks* pAllocationCallbacks;
- /// Informative callbacks for `vkAllocateMemory`, `vkFreeMemory`. Optional.
- /** Optional, can be null. */
- const VmaDeviceMemoryCallbacks* pDeviceMemoryCallbacks;
- /** \brief Maximum number of additional frames that are in use at the same time as current frame.
-
- This value is used only when you make allocations with
- VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocation cannot become
- lost if allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount.
-
- For example, if you double-buffer your command buffers, so resources used for
- rendering in previous frame may still be in use by the GPU at the moment you
- allocate resources needed for the current frame, set this value to 1.
-
- If you want to allow any allocations other than used in the current frame to
- become lost, set this value to 0.
- */
- uint32_t frameInUseCount;
- /** \brief Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
-
- If not NULL, it must be a pointer to an array of
- `VkPhysicalDeviceMemoryProperties::memoryHeapCount` elements, defining limit on
- maximum number of bytes that can be allocated out of particular Vulkan memory
- heap.
-
- Any of the elements may be equal to `VK_WHOLE_SIZE`, which means no limit on that
- heap. This is also the default in case of `pHeapSizeLimit` = NULL.
-
- If there is a limit defined for a heap:
-
- - If user tries to allocate more memory from that heap using this allocator,
- the allocation fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
- - If the limit is smaller than heap size reported in `VkMemoryHeap::size`, the
- value of this limit will be reported instead when using vmaGetMemoryProperties().
-
- Warning! Using this feature may not be equivalent to installing a GPU with
- smaller amount of memory, because graphics driver doesn't necessary fail new
- allocations with `VK_ERROR_OUT_OF_DEVICE_MEMORY` result when memory capacity is
- exceeded. It may return success and just silently migrate some device memory
- blocks to system RAM. This driver behavior can also be controlled using
- VK_AMD_memory_overallocation_behavior extension.
- */
- const VkDeviceSize* pHeapSizeLimit;
- /** \brief Pointers to Vulkan functions. Can be null if you leave define `VMA_STATIC_VULKAN_FUNCTIONS 1`.
-
- If you leave define `VMA_STATIC_VULKAN_FUNCTIONS 1` in configuration section,
- you can pass null as this member, because the library will fetch pointers to
- Vulkan functions internally in a static way, like:
-
- vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
-
- Fill this member if you want to provide your own pointers to Vulkan functions,
- e.g. fetched using `vkGetInstanceProcAddr()` and `vkGetDeviceProcAddr()`.
- */
- const VmaVulkanFunctions* pVulkanFunctions;
- /** \brief Parameters for recording of VMA calls. Can be null.
-
- If not null, it enables recording of calls to VMA functions to a file.
- If support for recording is not enabled using `VMA_RECORDING_ENABLED` macro,
- creation of the allocator object fails with `VK_ERROR_FEATURE_NOT_PRESENT`.
- */
- const VmaRecordSettings* pRecordSettings;
- /** \brief Optional handle to Vulkan instance object.
-
- Optional, can be null. Must be set if #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT flas is used
- or if `vulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)`.
- */
- VkInstance instance;
- /** \brief Optional. The highest version of Vulkan that the application is designed to use.
-
- It must be a value in the format as created by macro `VK_MAKE_VERSION` or a constant like: `VK_API_VERSION_1_1`, `VK_API_VERSION_1_0`.
- The patch version number specified is ignored. Only the major and minor versions are considered.
- It must be less or euqal (preferably equal) to value as passed to `vkCreateInstance` as `VkApplicationInfo::apiVersion`.
- Only versions 1.0 and 1.1 are supported by the current implementation.
- Leaving it initialized to zero is equivalent to `VK_API_VERSION_1_0`.
- */
- uint32_t vulkanApiVersion;
+ /// Flags for created allocator. Use #VmaAllocatorCreateFlagBits enum.
+ VmaAllocatorCreateFlags flags;
+ /// Vulkan physical device.
+ /** It must be valid throughout whole lifetime of created allocator. */
+ VkPhysicalDevice physicalDevice;
+ /// Vulkan device.
+ /** It must be valid throughout whole lifetime of created allocator. */
+ VkDevice device;
+ /// Preferred size of a single `VkDeviceMemory` block to be allocated from large heaps > 1 GiB. Optional.
+ /** Set to 0 to use default, which is currently 256 MiB. */
+ VkDeviceSize preferredLargeHeapBlockSize;
+ /// Custom CPU memory allocation callbacks. Optional.
+ /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. */
+ const VkAllocationCallbacks* pAllocationCallbacks;
+ /// Informative callbacks for `vkAllocateMemory`, `vkFreeMemory`. Optional.
+ /** Optional, can be null. */
+ const VmaDeviceMemoryCallbacks* pDeviceMemoryCallbacks;
+ /** \brief Maximum number of additional frames that are in use at the same time as current frame.
+
+ This value is used only when you make allocations with
+ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocation cannot become
+ lost if allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount.
+
+ For example, if you double-buffer your command buffers, so resources used for
+ rendering in previous frame may still be in use by the GPU at the moment you
+ allocate resources needed for the current frame, set this value to 1.
+
+ If you want to allow any allocations other than used in the current frame to
+ become lost, set this value to 0.
+ */
+ uint32_t frameInUseCount;
+ /** \brief Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
+
+ If not NULL, it must be a pointer to an array of
+ `VkPhysicalDeviceMemoryProperties::memoryHeapCount` elements, defining limit on
+ maximum number of bytes that can be allocated out of particular Vulkan memory
+ heap.
+
+ Any of the elements may be equal to `VK_WHOLE_SIZE`, which means no limit on that
+ heap. This is also the default in case of `pHeapSizeLimit` = NULL.
+
+ If there is a limit defined for a heap:
+
+ - If the user tries to allocate more memory from that heap using this allocator,
+ the allocation fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+ - If the limit is smaller than heap size reported in `VkMemoryHeap::size`, the
+ value of this limit will be reported instead when using vmaGetMemoryProperties().
+
+ Warning! Using this feature may not be equivalent to installing a GPU with
+ a smaller amount of memory, because the graphics driver doesn't necessarily fail new
+ allocations with `VK_ERROR_OUT_OF_DEVICE_MEMORY` result when memory capacity is
+ exceeded. It may return success and just silently migrate some device memory
+ blocks to system RAM. This driver behavior can also be controlled using
+ VK_AMD_memory_overallocation_behavior extension.
+ */
+ const VkDeviceSize* pHeapSizeLimit;
+ /** \brief Pointers to Vulkan functions. Can be null if you leave define `VMA_STATIC_VULKAN_FUNCTIONS 1`.
+
+ If you leave define `VMA_STATIC_VULKAN_FUNCTIONS 1` in configuration section,
+ you can pass null as this member, because the library will fetch pointers to
+ Vulkan functions internally in a static way, like:
+
+ vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
+
+ Fill this member if you want to provide your own pointers to Vulkan functions,
+ e.g. fetched using `vkGetInstanceProcAddr()` and `vkGetDeviceProcAddr()`.
+ */
+ const VmaVulkanFunctions* pVulkanFunctions;
+ /** \brief Parameters for recording of VMA calls. Can be null.
+
+ If not null, it enables recording of calls to VMA functions to a file.
+ If support for recording is not enabled using `VMA_RECORDING_ENABLED` macro,
+ creation of the allocator object fails with `VK_ERROR_FEATURE_NOT_PRESENT`.
+ */
+ const VmaRecordSettings* pRecordSettings;
+ /** \brief Optional handle to Vulkan instance object.
+
+ Optional, can be null. Must be set if #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT flag is used
+ or if `vulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)`.
+ */
+ VkInstance instance;
+ /** \brief Optional. The highest version of Vulkan that the application is designed to use.
+
+ It must be a value in the format as created by macro `VK_MAKE_VERSION` or a constant like: `VK_API_VERSION_1_1`, `VK_API_VERSION_1_0`.
+ The patch version number specified is ignored. Only the major and minor versions are considered.
+ It must be less than or equal (preferably equal) to the value passed to `vkCreateInstance` as `VkApplicationInfo::apiVersion`.
+ Only versions 1.0 and 1.1 are supported by the current implementation.
+ Leaving it initialized to zero is equivalent to `VK_API_VERSION_1_0`.
+ */
+ uint32_t vulkanApiVersion;
} VmaAllocatorCreateInfo;
/// Creates Allocator object.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
- const VmaAllocatorCreateInfo* pCreateInfo,
- VmaAllocator* pAllocator);
+ const VmaAllocatorCreateInfo* pCreateInfo,
+ VmaAllocator* pAllocator);
/// Destroys allocator object.
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
- VmaAllocator allocator);
+ VmaAllocator allocator);
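
A minimal create/destroy sketch, assuming `instance`, `physicalDevice` and `device` already exist in the application:

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.instance = instance;

    VmaAllocator allocator;
    VkResult result = vmaCreateAllocator(&allocatorInfo, &allocator);
    // ... create and destroy buffers/images through the allocator ...
    vmaDestroyAllocator(allocator);
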
/**
PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
You can access it here, without fetching it again on your own.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
- VmaAllocator allocator,
- const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
+ VmaAllocator allocator,
+ const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
/**
PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
You can access it here, without fetching it again on your own.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
- VmaAllocator allocator,
- const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
+ VmaAllocator allocator,
+ const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
/**
\brief Given Memory Type Index, returns Property Flags of this memory type.
@@ -2196,9 +2117,9 @@ This is just a convenience function. Same information can be obtained using
vmaGetMemoryProperties().
*/
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
- VmaAllocator allocator,
- uint32_t memoryTypeIndex,
- VkMemoryPropertyFlags* pFlags);
+ VmaAllocator allocator,
+ uint32_t memoryTypeIndex,
+ VkMemoryPropertyFlags* pFlags);
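
For example, a sketch of checking whether a chosen memory type is host-visible, assuming `allocator` from the creation sketch above and `memTypeIndex` obtained e.g. from vmaFindMemoryTypeIndex(), declared further below:

    VkMemoryPropertyFlags memFlags;
    vmaGetMemoryTypeProperties(allocator, memTypeIndex, &memFlags);
    if ((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        // This memory type can be mapped.
    }
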
/** \brief Sets index of the current frame.
@@ -2209,33 +2130,33 @@ when a new frame begins. Allocations queried using vmaGetAllocationInfo() cannot
become lost in the current frame.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
- VmaAllocator allocator,
- uint32_t frameIndex);
+ VmaAllocator allocator,
+ uint32_t frameIndex);
/** \brief Calculated statistics of memory usage in entire allocator.
*/
typedef struct VmaStatInfo
{
- /// Number of `VkDeviceMemory` Vulkan memory blocks allocated.
- uint32_t blockCount;
- /// Number of #VmaAllocation allocation objects allocated.
- uint32_t allocationCount;
- /// Number of free ranges of memory between allocations.
- uint32_t unusedRangeCount;
- /// Total number of bytes occupied by all allocations.
- VkDeviceSize usedBytes;
- /// Total number of bytes occupied by unused ranges.
- VkDeviceSize unusedBytes;
- VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
- VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
+ /// Number of `VkDeviceMemory` Vulkan memory blocks allocated.
+ uint32_t blockCount;
+ /// Number of #VmaAllocation allocation objects allocated.
+ uint32_t allocationCount;
+ /// Number of free ranges of memory between allocations.
+ uint32_t unusedRangeCount;
+ /// Total number of bytes occupied by all allocations.
+ VkDeviceSize usedBytes;
+ /// Total number of bytes occupied by unused ranges.
+ VkDeviceSize unusedBytes;
+ VkDeviceSize allocationSizeMin, allocationSizeAvg, allocationSizeMax;
+ VkDeviceSize unusedRangeSizeMin, unusedRangeSizeAvg, unusedRangeSizeMax;
} VmaStatInfo;
/// General statistics from current state of Allocator.
typedef struct VmaStats
{
- VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
- VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
- VmaStatInfo total;
+ VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES];
+ VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS];
+ VmaStatInfo total;
} VmaStats;
/** \brief Retrieves statistics from current state of the Allocator.
@@ -2248,48 +2169,48 @@ Note that when using allocator from multiple threads, returned information may i
become outdated.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
- VmaAllocator allocator,
- VmaStats* pStats);
+ VmaAllocator allocator,
+ VmaStats* pStats);
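
A sketch of reading the aggregated totals:

    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    printf("Used: %llu B, unused: %llu B, in %u VkDeviceMemory blocks\n",
        (unsigned long long)stats.total.usedBytes,
        (unsigned long long)stats.total.unusedBytes,
        stats.total.blockCount);
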
/** \brief Statistics of current memory usage and available budget, in bytes, for specific memory heap.
*/
typedef struct VmaBudget
{
- /** \brief Sum size of all `VkDeviceMemory` blocks allocated from particular heap, in bytes.
- */
- VkDeviceSize blockBytes;
-
- /** \brief Sum size of all allocations created in particular heap, in bytes.
-
- Usually less or equal than `blockBytes`.
- Difference `blockBytes - allocationBytes` is the amount of memory allocated but unused -
- available for new allocations or wasted due to fragmentation.
-
- It might be greater than `blockBytes` if there are some allocations in lost state, as they account
- to this value as well.
- */
- VkDeviceSize allocationBytes;
-
- /** \brief Estimated current memory usage of the program, in bytes.
-
- Fetched from system using `VK_EXT_memory_budget` extension if enabled.
-
- It might be different than `blockBytes` (usually higher) due to additional implicit objects
- also occupying the memory, like swapchain, pipelines, descriptor heaps, command buffers, or
- `VkDeviceMemory` blocks allocated outside of this library, if any.
- */
- VkDeviceSize usage;
-
- /** \brief Estimated amount of memory available to the program, in bytes.
-
- Fetched from system using `VK_EXT_memory_budget` extension if enabled.
-
- It might be different (most probably smaller) than `VkMemoryHeap::size[heapIndex]` due to factors
- external to the program, like other programs also consuming system resources.
- Difference `budget - usage` is the amount of additional memory that can probably
- be allocated without problems. Exceeding the budget may result in various problems.
- */
- VkDeviceSize budget;
+ /** \brief Sum size of all `VkDeviceMemory` blocks allocated from particular heap, in bytes.
+ */
+ VkDeviceSize blockBytes;
+
+ /** \brief Sum size of all allocations created in particular heap, in bytes.
+
+ Usually less than or equal to `blockBytes`.
+ Difference `blockBytes - allocationBytes` is the amount of memory allocated but unused -
+ available for new allocations or wasted due to fragmentation.
+
+ It might be greater than `blockBytes` if there are some allocations in lost state, as they count
+ toward this value as well.
+ */
+ VkDeviceSize allocationBytes;
+
+ /** \brief Estimated current memory usage of the program, in bytes.
+
+ Fetched from system using `VK_EXT_memory_budget` extension if enabled.
+
+ It might be different than `blockBytes` (usually higher) due to additional implicit objects
+ also occupying the memory, like swapchain, pipelines, descriptor heaps, command buffers, or
+ `VkDeviceMemory` blocks allocated outside of this library, if any.
+ */
+ VkDeviceSize usage;
+
+ /** \brief Estimated amount of memory available to the program, in bytes.
+
+ Fetched from system using `VK_EXT_memory_budget` extension if enabled.
+
+ It might be different (most probably smaller) than `VkMemoryHeap::size[heapIndex]` due to factors
+ external to the program, like other programs also consuming system resources.
+ Difference `budget - usage` is the amount of additional memory that can probably
+ be allocated without problems. Exceeding the budget may result in various problems.
+ */
+ VkDeviceSize budget;
} VmaBudget;
/** \brief Retrieves information about current memory budget for all memory heaps.
@@ -2303,8 +2224,8 @@ Note that when using allocator from multiple threads, returned information may i
become outdated.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
- VmaAllocator allocator,
- VmaBudget* pBudget);
+ VmaAllocator allocator,
+ VmaBudget* pBudget);
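
A sketch of inspecting per-heap budgets; it assumes `pBudget` points to one `VmaBudget` per memory heap, with the heap count taken from vmaGetMemoryProperties():

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budgets);

    const VkPhysicalDeviceMemoryProperties* memProps;
    vmaGetMemoryProperties(allocator, &memProps);
    for (uint32_t i = 0; i < memProps->memoryHeapCount; ++i)
    {
        printf("Heap %u: usage %llu / budget %llu\n", i,
            (unsigned long long)budgets[i].usage,
            (unsigned long long)budgets[i].budget);
    }
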
#ifndef VMA_STATS_STRING_ENABLED
#define VMA_STATS_STRING_ENABLED 1
@@ -2316,13 +2237,13 @@ VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
/** @param[out] ppStatsString Must be freed using vmaFreeStatsString() function.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
- VmaAllocator allocator,
- char** ppStatsString,
- VkBool32 detailedMap);
+ VmaAllocator allocator,
+ char** ppStatsString,
+ VkBool32 detailedMap);
VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
- VmaAllocator allocator,
- char* pStatsString);
+ VmaAllocator allocator,
+ char* pStatsString);
#endif // #if VMA_STATS_STRING_ENABLED
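
A sketch of dumping and releasing the statistics string:

    char* statsString = NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // detailed map included
    // ... write statsString (JSON) to a log or file ...
    vmaFreeStatsString(allocator, statsString);
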
@@ -2338,224 +2259,224 @@ VK_DEFINE_HANDLE(VmaPool)
typedef enum VmaMemoryUsage
{
- /** No intended memory usage specified.
- Use other members of VmaAllocationCreateInfo to specify your requirements.
- */
- VMA_MEMORY_USAGE_UNKNOWN = 0,
- /** Memory will be used on device only, so fast access from the device is preferred.
- It usually means device-local GPU (video) memory.
- No need to be mappable on host.
- It is roughly equivalent of `D3D12_HEAP_TYPE_DEFAULT`.
-
- Usage:
-
- - Resources written and read by device, e.g. images used as attachments.
- - Resources transferred from host once (immutable) or infrequently and read by
- device multiple times, e.g. textures to be sampled, vertex buffers, uniform
- (constant) buffers, and majority of other types of resources used on GPU.
-
- Allocation may still end up in `HOST_VISIBLE` memory on some implementations.
- In such case, you are free to map it.
- You can use #VMA_ALLOCATION_CREATE_MAPPED_BIT with this usage type.
- */
- VMA_MEMORY_USAGE_GPU_ONLY = 1,
- /** Memory will be mappable on host.
- It usually means CPU (system) memory.
- Guarantees to be `HOST_VISIBLE` and `HOST_COHERENT`.
- CPU access is typically uncached. Writes may be write-combined.
- Resources created in this pool may still be accessible to the device, but access to them can be slow.
- It is roughly equivalent of `D3D12_HEAP_TYPE_UPLOAD`.
-
- Usage: Staging copy of resources used as transfer source.
- */
- VMA_MEMORY_USAGE_CPU_ONLY = 2,
- /**
- Memory that is both mappable on host (guarantees to be `HOST_VISIBLE`) and preferably fast to access by GPU.
- CPU access is typically uncached. Writes may be write-combined.
-
- Usage: Resources written frequently by host (dynamic), read by device. E.g. textures, vertex buffers, uniform buffers updated every frame or every draw call.
- */
- VMA_MEMORY_USAGE_CPU_TO_GPU = 3,
- /** Memory mappable on host (guarantees to be `HOST_VISIBLE`) and cached.
- It is roughly equivalent of `D3D12_HEAP_TYPE_READBACK`.
-
- Usage:
-
- - Resources written by device, read by host - results of some computations, e.g. screen capture, average scene luminance for HDR tone mapping.
- - Any resources read or accessed randomly on host, e.g. CPU-side copy of vertex buffer used as source of transfer, but also used for collision detection.
- */
- VMA_MEMORY_USAGE_GPU_TO_CPU = 4,
- /** CPU memory - memory that is preferably not `DEVICE_LOCAL`, but also not guaranteed to be `HOST_VISIBLE`.
-
- Usage: Staging copy of resources moved from GPU memory to CPU memory as part
- of custom paging/residency mechanism, to be moved back to GPU memory when needed.
- */
- VMA_MEMORY_USAGE_CPU_COPY = 5,
- /** Lazily allocated GPU memory having `VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT`.
- Exists mostly on mobile platforms. Using it on desktop PC or other GPUs with no such memory type present will fail the allocation.
-
- Usage: Memory for transient attachment images (color attachments, depth attachments etc.), created with `VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT`.
-
- Allocations with this usage are always created as dedicated - it implies #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
- */
- VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED = 6,
-
- VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF
+ /** No intended memory usage specified.
+ Use other members of VmaAllocationCreateInfo to specify your requirements.
+ */
+ VMA_MEMORY_USAGE_UNKNOWN = 0,
+ /** Memory will be used on device only, so fast access from the device is preferred.
+ It usually means device-local GPU (video) memory.
+ No need to be mappable on host.
+ It is roughly equivalent to `D3D12_HEAP_TYPE_DEFAULT`.
+
+ Usage:
+
+ - Resources written and read by device, e.g. images used as attachments.
+ - Resources transferred from host once (immutable) or infrequently and read by
+ device multiple times, e.g. textures to be sampled, vertex buffers, uniform
+ (constant) buffers, and majority of other types of resources used on GPU.
+
+ Allocation may still end up in `HOST_VISIBLE` memory on some implementations.
+ In such case, you are free to map it.
+ You can use #VMA_ALLOCATION_CREATE_MAPPED_BIT with this usage type.
+ */
+ VMA_MEMORY_USAGE_GPU_ONLY = 1,
+ /** Memory will be mappable on host.
+ It usually means CPU (system) memory.
+ Guarantees to be `HOST_VISIBLE` and `HOST_COHERENT`.
+ CPU access is typically uncached. Writes may be write-combined.
+ Resources created in this pool may still be accessible to the device, but access to them can be slow.
+ It is roughly equivalent to `D3D12_HEAP_TYPE_UPLOAD`.
+
+ Usage: Staging copy of resources used as transfer source.
+ */
+ VMA_MEMORY_USAGE_CPU_ONLY = 2,
+ /**
+ Memory that is both mappable on host (guarantees to be `HOST_VISIBLE`) and preferably fast to access by GPU.
+ CPU access is typically uncached. Writes may be write-combined.
+
+ Usage: Resources written frequently by host (dynamic), read by device. E.g. textures, vertex buffers, uniform buffers updated every frame or every draw call.
+ */
+ VMA_MEMORY_USAGE_CPU_TO_GPU = 3,
+ /** Memory mappable on host (guarantees to be `HOST_VISIBLE`) and cached.
+ It is roughly equivalent to `D3D12_HEAP_TYPE_READBACK`.
+
+ Usage:
+
+ - Resources written by device, read by host - results of some computations, e.g. screen capture, average scene luminance for HDR tone mapping.
+ - Any resources read or accessed randomly on host, e.g. CPU-side copy of vertex buffer used as source of transfer, but also used for collision detection.
+ */
+ VMA_MEMORY_USAGE_GPU_TO_CPU = 4,
+ /** CPU memory - memory that is preferably not `DEVICE_LOCAL`, but also not guaranteed to be `HOST_VISIBLE`.
+
+ Usage: Staging copy of resources moved from GPU memory to CPU memory as part
+ of custom paging/residency mechanism, to be moved back to GPU memory when needed.
+ */
+ VMA_MEMORY_USAGE_CPU_COPY = 5,
+ /** Lazily allocated GPU memory having `VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT`.
+ Exists mostly on mobile platforms. Using it on desktop PC or other GPUs with no such memory type present will fail the allocation.
+
+ Usage: Memory for transient attachment images (color attachments, depth attachments etc.), created with `VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT`.
+
+ Allocations with this usage are always created as dedicated - it implies #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+ */
+ VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED = 6,
+
+ VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF
} VmaMemoryUsage;
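
As a sketch, a typical upload path pairs two of these values: a host-side staging copy uses #VMA_MEMORY_USAGE_CPU_ONLY, while the device-local destination uses #VMA_MEMORY_USAGE_GPU_ONLY (VmaAllocationCreateInfo is defined below):

    VmaAllocationCreateInfo stagingAllocInfo = {};
    stagingAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // HOST_VISIBLE + HOST_COHERENT

    VmaAllocationCreateInfo gpuAllocInfo = {};
    gpuAllocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;     // prefer DEVICE_LOCAL
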
/// Flags to be passed as VmaAllocationCreateInfo::flags.
typedef enum VmaAllocationCreateFlagBits {
- /** \brief Set this flag if the allocation should have its own memory block.
-
- Use it for special, big resources, like fullscreen images used as attachments.
+ /** \brief Set this flag if the allocation should have its own memory block.
+
+ Use it for special, big resources, like fullscreen images used as attachments.
- You should not use this flag if VmaAllocationCreateInfo::pool is not null.
- */
- VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001,
-
- /** \brief Set this flag to only try to allocate from existing `VkDeviceMemory` blocks and never create new such block.
-
- If new allocation cannot be placed in any of the existing blocks, allocation
- fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY` error.
-
- You should not use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT and
- #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT at the same time. It makes no sense.
-
- If VmaAllocationCreateInfo::pool is not null, this flag is implied and ignored. */
- VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002,
- /** \brief Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
-
- Pointer to mapped memory will be returned through VmaAllocationInfo::pMappedData.
-
- Is it valid to use this flag for allocation made from memory type that is not
- `HOST_VISIBLE`. This flag is then ignored and memory is not mapped. This is
- useful if you need an allocation that is efficient to use on GPU
- (`DEVICE_LOCAL`) and still want to map it directly if possible on platforms that
- support it (e.g. Intel GPU).
-
- You should not use this flag together with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.
- */
- VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004,
- /** Allocation created with this flag can become lost as a result of another
- allocation with #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flag, so you
- must check it before use.
-
- To check if allocation is not lost, call vmaGetAllocationInfo() and check if
- VmaAllocationInfo::deviceMemory is not `VK_NULL_HANDLE`.
-
- For details about supporting lost allocations, see Lost Allocations
- chapter of User Guide on Main Page.
-
- You should not use this flag together with #VMA_ALLOCATION_CREATE_MAPPED_BIT.
- */
- VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT = 0x00000008,
- /** While creating allocation using this flag, other allocations that were
- created with flag #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT can become lost.
-
- For details about supporting lost allocations, see Lost Allocations
- chapter of User Guide on Main Page.
- */
- VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT = 0x00000010,
- /** Set this flag to treat VmaAllocationCreateInfo::pUserData as pointer to a
- null-terminated string. Instead of copying pointer value, a local copy of the
- string is made and stored in allocation's `pUserData`. The string is automatically
- freed together with the allocation. It is also used in vmaBuildStatsString().
- */
- VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020,
- /** Allocation will be created from upper stack in a double stack pool.
-
- This flag is only allowed for custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT flag.
- */
- VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = 0x00000040,
- /** Create both buffer/image and allocation, but don't bind them together.
- It is useful when you want to bind yourself to do some more advanced binding, e.g. using some extensions.
- The flag is meaningful only with functions that bind by default: vmaCreateBuffer(), vmaCreateImage().
- Otherwise it is ignored.
- */
- VMA_ALLOCATION_CREATE_DONT_BIND_BIT = 0x00000080,
- /** Create allocation only if additional device memory required for it, if any, won't exceed
- memory budget. Otherwise return `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
- */
- VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT = 0x00000100,
-
- /** Allocation strategy that chooses smallest possible free range for the
- allocation.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT = 0x00010000,
- /** Allocation strategy that chooses biggest possible free range for the
- allocation.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT = 0x00020000,
- /** Allocation strategy that chooses first suitable free range for the
- allocation.
-
- "First" doesn't necessarily means the one with smallest offset in memory,
- but rather the one that is easiest and fastest to find.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT = 0x00040000,
-
- /** Allocation strategy that tries to minimize memory usage.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT,
- /** Allocation strategy that tries to minimize allocation time.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
- /** Allocation strategy that tries to minimize memory fragmentation.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT,
-
- /** A bit mask to extract only `STRATEGY` bits from entire set of flags.
- */
- VMA_ALLOCATION_CREATE_STRATEGY_MASK =
- VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT |
- VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT |
- VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
-
- VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+ You should not use this flag if VmaAllocationCreateInfo::pool is not null.
+ */
+ VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001,
+
+ /** \brief Set this flag to only try to allocate from existing `VkDeviceMemory` blocks and never create new such block.
+
+ If new allocation cannot be placed in any of the existing blocks, allocation
+ fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY` error.
+
+ You should not use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT and
+ #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT at the same time. It makes no sense.
+
+ If VmaAllocationCreateInfo::pool is not null, this flag is implied and ignored. */
+ VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002,
+ /** \brief Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
+
+ Pointer to mapped memory will be returned through VmaAllocationInfo::pMappedData.
+
+ It is valid to use this flag for an allocation made from a memory type that is not
+ `HOST_VISIBLE`. This flag is then ignored and memory is not mapped. This is
+ useful if you need an allocation that is efficient to use on GPU
+ (`DEVICE_LOCAL`) and still want to map it directly if possible on platforms that
+ support it (e.g. Intel GPU).
+
+ You should not use this flag together with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.
+ */
+ VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004,
+ /** Allocation created with this flag can become lost as a result of another
+ allocation with #VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT flag, so you
+ must check it before use.
+
+ To check if allocation is not lost, call vmaGetAllocationInfo() and check if
+ VmaAllocationInfo::deviceMemory is not `VK_NULL_HANDLE`.
+
+ For details about supporting lost allocations, see Lost Allocations
+ chapter of User Guide on Main Page.
+
+ You should not use this flag together with #VMA_ALLOCATION_CREATE_MAPPED_BIT.
+ */
+ VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT = 0x00000008,
+ /** While creating allocation using this flag, other allocations that were
+ created with flag #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT can become lost.
+
+ For details about supporting lost allocations, see Lost Allocations
+ chapter of User Guide on Main Page.
+ */
+ VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT = 0x00000010,
+ /** Set this flag to treat VmaAllocationCreateInfo::pUserData as pointer to a
+ null-terminated string. Instead of copying pointer value, a local copy of the
+ string is made and stored in allocation's `pUserData`. The string is automatically
+ freed together with the allocation. It is also used in vmaBuildStatsString().
+ */
+ VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020,
+ /** Allocation will be created from upper stack in a double stack pool.
+
+ This flag is only allowed for custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT flag.
+ */
+ VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = 0x00000040,
+ /** Create both buffer/image and allocation, but don't bind them together.
+ It is useful when you want to do the binding yourself, e.g. for some more advanced binding using extensions.
+ The flag is meaningful only with functions that bind by default: vmaCreateBuffer(), vmaCreateImage().
+ Otherwise it is ignored.
+ */
+ VMA_ALLOCATION_CREATE_DONT_BIND_BIT = 0x00000080,
+ /** Create allocation only if additional device memory required for it, if any, won't exceed
+ memory budget. Otherwise return `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+ */
+ VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT = 0x00000100,
+
+ /** Allocation strategy that chooses smallest possible free range for the
+ allocation.
+ */
+ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT = 0x00010000,
+ /** Allocation strategy that chooses biggest possible free range for the
+ allocation.
+ */
+ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT = 0x00020000,
+ /** Allocation strategy that chooses first suitable free range for the
+ allocation.
+
+ "First" doesn't necessarily means the one with smallest offset in memory,
+ but rather the one that is easiest and fastest to find.
+ */
+ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT = 0x00040000,
+
+ /** Allocation strategy that tries to minimize memory usage.
+ */
+ VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT,
+ /** Allocation strategy that tries to minimize allocation time.
+ */
+ VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
+ /** Allocation strategy that tries to minimize memory fragmentation.
+ */
+ VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT,
+
+ /** A bit mask to extract only `STRATEGY` bits from entire set of flags.
+ */
+ VMA_ALLOCATION_CREATE_STRATEGY_MASK =
+ VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT |
+ VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT |
+ VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
+
+ VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaAllocationCreateFlagBits;
typedef VkFlags VmaAllocationCreateFlags;
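
A sketch combining a usage value with a flag: a persistently mapped staging allocation, whose pointer is later read from VmaAllocationInfo::pMappedData:

    VmaAllocationCreateInfo mappedAllocInfo = {};
    mappedAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    mappedAllocInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    // After a successful allocation, VmaAllocationInfo::pMappedData stays valid
    // for the allocation's whole lifetime.
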
typedef struct VmaAllocationCreateInfo
{
- /// Use #VmaAllocationCreateFlagBits enum.
- VmaAllocationCreateFlags flags;
- /** \brief Intended usage of memory.
-
- You can leave #VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements in other way. \n
- If `pool` is not null, this member is ignored.
- */
- VmaMemoryUsage usage;
- /** \brief Flags that must be set in a Memory Type chosen for an allocation.
-
- Leave 0 if you specify memory requirements in other way. \n
- If `pool` is not null, this member is ignored.*/
- VkMemoryPropertyFlags requiredFlags;
- /** \brief Flags that preferably should be set in a memory type chosen for an allocation.
-
- Set to 0 if no additional flags are prefered. \n
- If `pool` is not null, this member is ignored. */
- VkMemoryPropertyFlags preferredFlags;
- /** \brief Bitmask containing one bit set for every memory type acceptable for this allocation.
-
- Value 0 is equivalent to `UINT32_MAX` - it means any memory type is accepted if
- it meets other requirements specified by this structure, with no further
- restrictions on memory type index. \n
- If `pool` is not null, this member is ignored.
- */
- uint32_t memoryTypeBits;
- /** \brief Pool that this allocation should be created in.
-
- Leave `VK_NULL_HANDLE` to allocate from default pool. If not null, members:
- `usage`, `requiredFlags`, `preferredFlags`, `memoryTypeBits` are ignored.
- */
- VmaPool pool;
- /** \brief Custom general-purpose pointer that will be stored in #VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData().
-
- If #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is used, it must be either
- null or pointer to a null-terminated string. The string will be then copied to
- internal buffer, so it doesn't need to be valid after allocation call.
- */
- void* pUserData;
+ /// Use #VmaAllocationCreateFlagBits enum.
+ VmaAllocationCreateFlags flags;
+ /** \brief Intended usage of memory.
+
+ You can leave #VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements in another way. \n
+ If `pool` is not null, this member is ignored.
+ */
+ VmaMemoryUsage usage;
+ /** \brief Flags that must be set in a Memory Type chosen for an allocation.
+
+ Leave 0 if you specify memory requirements in another way. \n
+ If `pool` is not null, this member is ignored.*/
+ VkMemoryPropertyFlags requiredFlags;
+ /** \brief Flags that preferably should be set in a memory type chosen for an allocation.
+
+ Set to 0 if no additional flags are preferred. \n
+ If `pool` is not null, this member is ignored. */
+ VkMemoryPropertyFlags preferredFlags;
+ /** \brief Bitmask containing one bit set for every memory type acceptable for this allocation.
+
+ Value 0 is equivalent to `UINT32_MAX` - it means any memory type is accepted if
+ it meets other requirements specified by this structure, with no further
+ restrictions on memory type index. \n
+ If `pool` is not null, this member is ignored.
+ */
+ uint32_t memoryTypeBits;
+ /** \brief Pool that this allocation should be created in.
+
+ Leave `VK_NULL_HANDLE` to allocate from default pool. If not null, members:
+ `usage`, `requiredFlags`, `preferredFlags`, `memoryTypeBits` are ignored.
+ */
+ VmaPool pool;
+ /** \brief Custom general-purpose pointer that will be stored in #VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData().
+
+ If #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is used, it must be either
+ null or pointer to a null-terminated string. The string will be then copied to
+ internal buffer, so it doesn't need to be valid after allocation call.
+ */
+ void* pUserData;
} VmaAllocationCreateInfo;
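
A sketch of the most common use of this structure, creating a buffer together with its memory via vmaCreateBuffer() (declared further below in this header):

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, NULL);
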
/**
@@ -2575,10 +2496,10 @@ type of resource you want to use it for. Please check parameters of your
resource, like image layout (OPTIMAL versus LINEAR) or mip level count.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
- VmaAllocator allocator,
- uint32_t memoryTypeBits,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex);
+ VmaAllocator allocator,
+ uint32_t memoryTypeBits,
+ const VmaAllocationCreateInfo* pAllocationCreateInfo,
+ uint32_t* pMemoryTypeIndex);
/**
\brief Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
@@ -2593,10 +2514,10 @@ It is just a convenience function, equivalent to calling:
- `vkDestroyBuffer`
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
- VmaAllocator allocator,
- const VkBufferCreateInfo* pBufferCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex);
+ VmaAllocator allocator,
+ const VkBufferCreateInfo* pBufferCreateInfo,
+ const VmaAllocationCreateInfo* pAllocationCreateInfo,
+ uint32_t* pMemoryTypeIndex);
/**
\brief Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
@@ -2611,142 +2532,142 @@ It is just a convenience function, equivalent to calling:
- `vkDestroyImage`
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
- VmaAllocator allocator,
- const VkImageCreateInfo* pImageCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex);
+ VmaAllocator allocator,
+ const VkImageCreateInfo* pImageCreateInfo,
+ const VmaAllocationCreateInfo* pAllocationCreateInfo,
+ uint32_t* pMemoryTypeIndex);
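
A sketch of querying a compatible memory type index for a buffer, e.g. before creating a custom pool for such buffers:

    VkBufferCreateInfo exampleBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    exampleBufInfo.size = 1024;
    exampleBufInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo exampleAllocInfo = {};
    exampleAllocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    uint32_t memTypeIndex;
    vmaFindMemoryTypeIndexForBufferInfo(allocator, &exampleBufInfo, &exampleAllocInfo, &memTypeIndex);
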
/// Flags to be passed as VmaPoolCreateInfo::flags.
typedef enum VmaPoolCreateFlagBits {
- /** \brief Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool and so Buffer-Image Granularity can be ignored.
+ /** \brief Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool and so Buffer-Image Granularity can be ignored.
- This is an optional optimization flag.
+ This is an optional optimization flag.
- If you always allocate using vmaCreateBuffer(), vmaCreateImage(),
- vmaAllocateMemoryForBuffer(), then you don't need to use it because allocator
- knows exact type of your allocations so it can handle Buffer-Image Granularity
- in the optimal way.
+ If you always allocate using vmaCreateBuffer(), vmaCreateImage(),
+ vmaAllocateMemoryForBuffer(), then you don't need to use it because the allocator
+ knows the exact type of your allocations so it can handle Buffer-Image Granularity
+ in the optimal way.
- If you also allocate using vmaAllocateMemoryForImage() or vmaAllocateMemory(),
- exact type of such allocations is not known, so allocator must be conservative
- in handling Buffer-Image Granularity, which can lead to suboptimal allocation
- (wasted memory). In that case, if you can make sure you always allocate only
- buffers and linear images or only optimal images out of this pool, use this flag
- to make allocator disregard Buffer-Image Granularity and so make allocations
- faster and more optimal.
- */
- VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002,
+ If you also allocate using vmaAllocateMemoryForImage() or vmaAllocateMemory(),
+ exact type of such allocations is not known, so allocator must be conservative
+ in handling Buffer-Image Granularity, which can lead to suboptimal allocation
+ (wasted memory). In that case, if you can make sure you always allocate only
+ buffers and linear images or only optimal images out of this pool, use this flag
+ to make allocator disregard Buffer-Image Granularity and so make allocations
+ faster and more optimal.
+ */
+ VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002,
- /** \brief Enables alternative, linear allocation algorithm in this pool.
+ /** \brief Enables alternative, linear allocation algorithm in this pool.
- Specify this flag to enable linear allocation algorithm, which always creates
- new allocations after last one and doesn't reuse space from allocations freed in
- between. It trades memory consumption for simplified algorithm and data
- structure, which has better performance and uses less memory for metadata.
+ Specify this flag to enable the linear allocation algorithm, which always creates
+ new allocations after the last one and doesn't reuse space from allocations freed in
+ between. It trades memory consumption for a simplified algorithm and data
+ structure, which has better performance and uses less memory for metadata.
- By using this flag, you can achieve behavior of free-at-once, stack,
- ring buffer, and double stack. For details, see documentation chapter
- \ref linear_algorithm.
+ By using this flag, you can achieve behavior of free-at-once, stack,
+ ring buffer, and double stack. For details, see documentation chapter
+ \ref linear_algorithm.
- When using this flag, you must specify VmaPoolCreateInfo::maxBlockCount == 1 (or 0 for default).
+ When using this flag, you must specify VmaPoolCreateInfo::maxBlockCount == 1 (or 0 for default).
- For more details, see [Linear allocation algorithm](@ref linear_algorithm).
- */
- VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT = 0x00000004,
+ For more details, see [Linear allocation algorithm](@ref linear_algorithm).
+ */
+ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT = 0x00000004,
- /** \brief Enables alternative, buddy allocation algorithm in this pool.
+ /** \brief Enables alternative, buddy allocation algorithm in this pool.
- It operates on a tree of blocks, each having size that is a power of two and
- a half of its parent's size. Compared to the default algorithm, this one provides
- faster allocation and deallocation and decreased external fragmentation,
- at the expense of more memory wasted (internal fragmentation).
+ It operates on a tree of blocks, each having size that is a power of two and
+ a half of its parent's size. Compared to the default algorithm, this one provides
+ faster allocation and deallocation and decreased external fragmentation,
+ at the expense of more memory wasted (internal fragmentation).
- For more details, see [Buddy allocation algorithm](@ref buddy_algorithm).
- */
- VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT = 0x00000008,
+ For more details, see [Buddy allocation algorithm](@ref buddy_algorithm).
+ */
+ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT = 0x00000008,
- /** Bit mask to extract only `ALGORITHM` bits from entire set of flags.
- */
- VMA_POOL_CREATE_ALGORITHM_MASK =
- VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT |
- VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT,
+ /** Bit mask to extract only `ALGORITHM` bits from entire set of flags.
+ */
+ VMA_POOL_CREATE_ALGORITHM_MASK =
+ VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT |
+ VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT,
- VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+ VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaPoolCreateFlagBits;
typedef VkFlags VmaPoolCreateFlags;
/** \brief Describes parameters of a created #VmaPool.
*/
typedef struct VmaPoolCreateInfo {
- /** \brief Vulkan memory type index to allocate this pool from.
- */
- uint32_t memoryTypeIndex;
- /** \brief Use combination of #VmaPoolCreateFlagBits.
- */
- VmaPoolCreateFlags flags;
- /** \brief Size of a single `VkDeviceMemory` block to be allocated as part of this pool, in bytes. Optional.
-
- Specify nonzero to set explicit, constant size of memory blocks used by this
- pool.
-
- Leave 0 to use default and let the library manage block sizes automatically.
- Sizes of particular blocks may vary.
- */
- VkDeviceSize blockSize;
- /** \brief Minimum number of blocks to be always allocated in this pool, even if they stay empty.
-
- Set to 0 to have no preallocated blocks and allow the pool to be completely empty.
- */
- size_t minBlockCount;
- /** \brief Maximum number of blocks that can be allocated in this pool. Optional.
-
- Set to 0 to use default, which is `SIZE_MAX`, which means no limit.
-
- Set to the same value as VmaPoolCreateInfo::minBlockCount to have a fixed amount of memory allocated
- throughout the whole lifetime of this pool.
- */
- size_t maxBlockCount;
- /** \brief Maximum number of additional frames that are in use at the same time as current frame.
-
- This value is used only when you make allocations with
- #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocation cannot become
- lost if allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount.
-
- For example, if you double-buffer your command buffers, so resources used for
- rendering in the previous frame may still be in use by the GPU at the moment you
- allocate resources needed for the current frame, set this value to 1.
-
- If you want to allow any allocations other than those used in the current frame to
- become lost, set this value to 0.
- */
- uint32_t frameInUseCount;
+ /** \brief Vulkan memory type index to allocate this pool from.
+ */
+ uint32_t memoryTypeIndex;
+ /** \brief Use combination of #VmaPoolCreateFlagBits.
+ */
+ VmaPoolCreateFlags flags;
+ /** \brief Size of a single `VkDeviceMemory` block to be allocated as part of this pool, in bytes. Optional.
+
+ Specify nonzero to set explicit, constant size of memory blocks used by this
+ pool.
+
+ Leave 0 to use default and let the library manage block sizes automatically.
+ Sizes of particular blocks may vary.
+ */
+ VkDeviceSize blockSize;
+ /** \brief Minimum number of blocks to be always allocated in this pool, even if they stay empty.
+
+ Set to 0 to have no preallocated blocks and allow the pool to be completely empty.
+ */
+ size_t minBlockCount;
+ /** \brief Maximum number of blocks that can be allocated in this pool. Optional.
+
+ Set to 0 to use default, which is `SIZE_MAX`, which means no limit.
+
+ Set to the same value as VmaPoolCreateInfo::minBlockCount to have a fixed amount of memory allocated
+ throughout the whole lifetime of this pool.
+ */
+ size_t maxBlockCount;
+ /** \brief Maximum number of additional frames that are in use at the same time as current frame.
+
+ This value is used only when you make allocations with
+ #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT flag. Such allocation cannot become
+ lost if allocation.lastUseFrameIndex >= allocator.currentFrameIndex - frameInUseCount.
+
+ For example, if you double-buffer your command buffers, so resources used for
+ rendering in the previous frame may still be in use by the GPU at the moment you
+ allocate resources needed for the current frame, set this value to 1.
+
+ If you want to allow any allocations other than those used in the current frame to
+ become lost, set this value to 0.
+ */
+ uint32_t frameInUseCount;
} VmaPoolCreateInfo;
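
To make the relationship between these fields concrete, here is a minimal, illustrative sketch of creating a custom pool (assuming `allocator` already exists; vmaFindMemoryTypeIndexForBufferInfo() is declared earlier in this header and vmaCreatePool() just below):

// Illustrative only. Pick the memory type the pool will draw from, based on
// an example buffer, then create a fixed-size pool using the linear algorithm.
VkBufferCreateInfo exampleBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
exampleBufInfo.size = 1024;
exampleBufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

VmaAllocationCreateInfo exampleAllocInfo = {};
exampleAllocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

uint32_t memTypeIndex;
VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
    allocator, &exampleBufInfo, &exampleAllocInfo, &memTypeIndex);

VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memTypeIndex;
poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
poolCreateInfo.blockSize = 16ull * 1024 * 1024; // one fixed 16 MiB block
poolCreateInfo.maxBlockCount = 1; // required by the linear algorithm

VmaPool pool;
res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
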
/** \brief Describes parameters of an existing #VmaPool.
*/
typedef struct VmaPoolStats {
- /** \brief Total amount of `VkDeviceMemory` allocated from Vulkan for this pool, in bytes.
- */
- VkDeviceSize size;
- /** \brief Total number of bytes in the pool not used by any #VmaAllocation.
- */
- VkDeviceSize unusedSize;
- /** \brief Number of #VmaAllocation objects created from this pool that were not destroyed or lost.
- */
- size_t allocationCount;
- /** \brief Number of contiguous memory ranges in the pool not used by any #VmaAllocation.
- */
- size_t unusedRangeCount;
- /** \brief Size of the largest contiguous free memory region available for new allocation.
-
- Making a new allocation of that size is not guaranteed to succeed because of
- possible additional margin required to respect alignment and buffer/image
- granularity.
- */
- VkDeviceSize unusedRangeSizeMax;
- /** \brief Number of `VkDeviceMemory` blocks allocated for this pool.
- */
- size_t blockCount;
+ /** \brief Total amount of `VkDeviceMemory` allocated from Vulkan for this pool, in bytes.
+ */
+ VkDeviceSize size;
+ /** \brief Total number of bytes in the pool not used by any #VmaAllocation.
+ */
+ VkDeviceSize unusedSize;
+ /** \brief Number of #VmaAllocation objects created from this pool that were not destroyed or lost.
+ */
+ size_t allocationCount;
+ /** \brief Number of contiguous memory ranges in the pool not used by any #VmaAllocation.
+ */
+ size_t unusedRangeCount;
+ /** \brief Size of the largest contiguous free memory region available for new allocation.
+
+ Making a new allocation of that size is not guaranteed to succeed because of
+ possible additional margin required to respect alignment and buffer/image
+ granularity.
+ */
+ VkDeviceSize unusedRangeSizeMax;
+ /** \brief Number of `VkDeviceMemory` blocks allocated for this pool.
+ */
+ size_t blockCount;
} VmaPoolStats;
/** \brief Allocates Vulkan device memory and creates #VmaPool object.
@@ -2763,8 +2684,8 @@ VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
/** \brief Destroys #VmaPool object and frees Vulkan device memory.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
- VmaAllocator allocator,
- VmaPool pool);
+ VmaAllocator allocator,
+ VmaPool pool);
/** \brief Retrieves statistics of existing #VmaPool object.
@@ -2773,9 +2694,9 @@ VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
@param[out] pPoolStats Statistics of specified pool.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
- VmaAllocator allocator,
- VmaPool pool,
- VmaPoolStats* pPoolStats);
+ VmaAllocator allocator,
+ VmaPool pool,
+ VmaPoolStats* pPoolStats);
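
A short illustrative use, continuing the hypothetical `pool` from the sketch above (printf used for brevity; requires <cstdio>):

VmaPoolStats poolStats;
vmaGetPoolStats(allocator, pool, &poolStats);
// unusedRangeSizeMax hints at the largest allocation that could still fit,
// alignment and buffer/image granularity margins aside.
printf("pool: %llu/%llu bytes used in %zu block(s)\n",
    (unsigned long long)(poolStats.size - poolStats.unusedSize),
    (unsigned long long)poolStats.size,
    poolStats.blockCount);
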
/** \brief Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInfo::frameInUseCount back from now.
@@ -2784,9 +2705,9 @@ VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
@param[out] pLostAllocationCount Number of allocations marked as lost. Optional - pass null if you don't need this information.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
- VmaAllocator allocator,
- VmaPool pool,
- size_t* pLostAllocationCount);
+ VmaAllocator allocator,
+ VmaPool pool,
+ size_t* pLostAllocationCount);
/** \brief Checks magic number in margins around all allocations in given memory pool in search for corruptions.
@@ -2811,9 +2732,9 @@ containing name of the pool that was previously set. The pointer becomes invalid
destroyed or its name is changed using vmaSetPoolName().
*/
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
- VmaAllocator allocator,
- VmaPool pool,
- const char** ppName);
+ VmaAllocator allocator,
+ VmaPool pool,
+ const char** ppName);
/** \brief Sets name of a custom pool.
@@ -2821,9 +2742,9 @@ VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
The function makes an internal copy of the string, so it can be changed or freed immediately after this call.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
- VmaAllocator allocator,
- VmaPool pool,
- const char* pName);
+ VmaAllocator allocator,
+ VmaPool pool,
+ const char* pName);
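
Illustrative usage of the name pair (the label is only a debugging aid; `pool` assumed from above):

vmaSetPoolName(allocator, pool, "StagingPool"); // string is copied internally
const char* poolName = nullptr;
vmaGetPoolName(allocator, pool, &poolName);     // valid until renamed or pool destroyed
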
/** \struct VmaAllocation
\brief Represents single memory allocation.
@@ -2854,44 +2775,44 @@ VK_DEFINE_HANDLE(VmaAllocation)
/** \brief Parameters of #VmaAllocation objects that can be retrieved using function vmaGetAllocationInfo().
*/
typedef struct VmaAllocationInfo {
- /** \brief Memory type index that this allocation was allocated from.
-
- It never changes.
- */
- uint32_t memoryType;
- /** \brief Handle to Vulkan memory object.
-
- Same memory object can be shared by multiple allocations.
-
- It can change after call to vmaDefragment() if this allocation is passed to the function, or if allocation is lost.
-
- If the allocation is lost, it is equal to `VK_NULL_HANDLE`.
- */
- VkDeviceMemory deviceMemory;
- /** \brief Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
-
- It can change after call to vmaDefragment() if this allocation is passed to the function, or if allocation is lost.
- */
- VkDeviceSize offset;
- /** \brief Size of this allocation, in bytes.
-
- It never changes, unless allocation is lost.
- */
- VkDeviceSize size;
- /** \brief Pointer to the beginning of this allocation as mapped data.
-
- If the allocation hasn't been mapped using vmaMapMemory() and hasn't been
- created with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag, this value is null.
-
- It can change after call to vmaMapMemory(), vmaUnmapMemory().
- It can also change after call to vmaDefragment() if this allocation is passed to the function.
- */
- void* pMappedData;
- /** \brief Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
-
- It can change after call to vmaSetAllocationUserData() for this allocation.
- */
- void* pUserData;
+ /** \brief Memory type index that this allocation was allocated from.
+
+ It never changes.
+ */
+ uint32_t memoryType;
+ /** \brief Handle to Vulkan memory object.
+
+ Same memory object can be shared by multiple allocations.
+
+ It can change after call to vmaDefragment() if this allocation is passed to the function, or if allocation is lost.
+
+ If the allocation is lost, it is equal to `VK_NULL_HANDLE`.
+ */
+ VkDeviceMemory deviceMemory;
+ /** \brief Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
+
+ It can change after call to vmaDefragment() if this allocation is passed to the function, or if allocation is lost.
+ */
+ VkDeviceSize offset;
+ /** \brief Size of this allocation, in bytes.
+
+ It never changes, unless allocation is lost.
+ */
+ VkDeviceSize size;
+ /** \brief Pointer to the beginning of this allocation as mapped data.
+
+ If the allocation hasn't been mapped using vmaMapMemory() and hasn't been
+ created with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag, this value is null.
+
+ It can change after call to vmaMapMemory(), vmaUnmapMemory().
+ It can also change after call to vmaDefragment() if this allocation is passed to the function.
+ */
+ void* pMappedData;
+ /** \brief Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
+
+ It can change after call to vmaSetAllocationUserData() for this allocation.
+ */
+ void* pUserData;
} VmaAllocationInfo;
/** \brief General purpose memory allocation.
@@ -2905,11 +2826,11 @@ It is recommended to use vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage
vmaCreateBuffer(), vmaCreateImage() instead whenever possible.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
- VmaAllocator allocator,
- const VkMemoryRequirements* pVkMemoryRequirements,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
+ VmaAllocator allocator,
+ const VkMemoryRequirements* pVkMemoryRequirements,
+ const VmaAllocationCreateInfo* pCreateInfo,
+ VmaAllocation* pAllocation,
+ VmaAllocationInfo* pAllocationInfo);
/** \brief General purpose memory allocation for multiple allocation objects at once.
@@ -2931,12 +2852,12 @@ If any allocation fails, all allocations already made within this function call
returned result is not `VK_SUCCESS`, `pAllocation` array is always entirely filled with `VK_NULL_HANDLE`.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
- VmaAllocator allocator,
- const VkMemoryRequirements* pVkMemoryRequirements,
- const VmaAllocationCreateInfo* pCreateInfo,
- size_t allocationCount,
- VmaAllocation* pAllocations,
- VmaAllocationInfo* pAllocationInfo);
+ VmaAllocator allocator,
+ const VkMemoryRequirements* pVkMemoryRequirements,
+ const VmaAllocationCreateInfo* pCreateInfo,
+ size_t allocationCount,
+ VmaAllocation* pAllocations,
+ VmaAllocationInfo* pAllocationInfo);
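
A hedged sketch of the batched path (assuming `memReq` and `allocCreateInfo` were prepared as for vmaAllocateMemory()):

const size_t pageCount = 8;
VmaAllocation pages[pageCount] = {};
VkResult res = vmaAllocateMemoryPages(
    allocator, &memReq, &allocCreateInfo, pageCount, pages, nullptr);
if(res == VK_SUCCESS)
{
    // ... bind each page to its resource and use them ...
    vmaFreeMemoryPages(allocator, pageCount, pages); // frees all at once
}
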
/**
@param[out] pAllocation Handle to allocated memory.
@@ -2945,27 +2866,27 @@ VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
You should free the memory using vmaFreeMemory().
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
- VmaAllocator allocator,
- VkBuffer buffer,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
+ VmaAllocator allocator,
+ VkBuffer buffer,
+ const VmaAllocationCreateInfo* pCreateInfo,
+ VmaAllocation* pAllocation,
+ VmaAllocationInfo* pAllocationInfo);
/// Function similar to vmaAllocateMemoryForBuffer().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
- VmaAllocator allocator,
- VkImage image,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
+ VmaAllocator allocator,
+ VkImage image,
+ const VmaAllocationCreateInfo* pCreateInfo,
+ VmaAllocation* pAllocation,
+ VmaAllocationInfo* pAllocationInfo);
/** \brief Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
Passing `VK_NULL_HANDLE` as `allocation` is valid. Such a function call is just skipped.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
- VmaAllocator allocator,
- VmaAllocation allocation);
+ VmaAllocator allocator,
+ VmaAllocation allocation);
/** \brief Frees memory and destroys multiple allocations.
@@ -2978,21 +2899,20 @@ Allocations in `pAllocations` array can come from any memory pools and types.
Passing `VK_NULL_HANDLE` as elements of `pAllocations` array is valid. Such entries are just skipped.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
- VmaAllocator allocator,
- size_t allocationCount,
- VmaAllocation* pAllocations);
+ VmaAllocator allocator,
+ size_t allocationCount,
+ VmaAllocation* pAllocations);
/** \brief Deprecated.
-\deprecated
In version 2.2.0 it used to try to change allocation's size without moving or reallocating it.
In current version it returns `VK_SUCCESS` only if `newSize` equals current allocation's size.
Otherwise returns `VK_ERROR_OUT_OF_POOL_MEMORY`, indicating that allocation's size could not be changed.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkDeviceSize newSize);
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkDeviceSize newSize);
/** \brief Returns current information about specified allocation and atomically marks it as used in current frame.
@@ -3011,9 +2931,9 @@ you can avoid calling it too often.
- If you just want to check if allocation is not lost, vmaTouchAllocation() will work faster.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VmaAllocationInfo* pAllocationInfo);
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VmaAllocationInfo* pAllocationInfo);
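
For illustration, a typical re-query after something may have moved the allocation (`allocation` assumed):

VmaAllocationInfo allocInfo;
vmaGetAllocationInfo(allocator, allocation, &allocInfo);
// (deviceMemory, offset) is the allocation's current, unique location;
// a lost allocation reports deviceMemory == VK_NULL_HANDLE.
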
/** \brief Returns `VK_TRUE` if allocation is not lost and atomically marks it as used in current frame.
@@ -3030,8 +2950,8 @@ If the allocation has been created without #VMA_ALLOCATION_CREATE_CAN_BECOME_LOS
this function always returns `VK_TRUE`.
*/
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
- VmaAllocator allocator,
- VmaAllocation allocation);
+ VmaAllocator allocator,
+ VmaAllocation allocation);
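
An illustrative per-frame pattern for allocations created with #VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT (names assumed; recreation details omitted):

if(vmaTouchAllocation(allocator, allocation) == VK_FALSE)
{
    // The allocation was lost: release the stale resource and recreate it.
    vmaDestroyBuffer(allocator, buffer, allocation);
    // ... call vmaCreateBuffer() again with the same create infos ...
}
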
/** \brief Sets pUserData in given allocation to new value.
@@ -3047,9 +2967,9 @@ allocation's `pUserData`. It is opaque, so you can use it however you want - e.g
as a pointer, ordinal number, or some handle to your own data.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
- VmaAllocator allocator,
- VmaAllocation allocation,
- void* pUserData);
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ void* pUserData);
/** \brief Creates new allocation that is in lost state from the beginning.
@@ -3062,8 +2982,8 @@ not bound to any image or buffer. It has size = 0. It cannot be turned into
a real, non-empty allocation.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
- VmaAllocator allocator,
- VmaAllocation* pAllocation);
+ VmaAllocator allocator,
+ VmaAllocation* pAllocation);
/** \brief Maps memory represented by given allocation and returns pointer to it.
@@ -3104,9 +3024,9 @@ If the allocation is made from a memory types that is not `HOST_COHERENT`,
you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by Vulkan specification.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- void** ppData);
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ void** ppData);
/** \brief Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
@@ -3117,8 +3037,8 @@ If the allocation is made from a memory types that is not `HOST_COHERENT`,
you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by Vulkan specification.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
- VmaAllocator allocator,
- VmaAllocation allocation);
+ VmaAllocator allocator,
+ VmaAllocation allocation);
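
A minimal upload sketch, assuming `srcData`/`srcSize` fit the allocation; the flush (vmaFlushAllocation() is declared just below) is only needed for memory types that are not `HOST_COHERENT`:

void* mapped = nullptr;
if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
{
    memcpy(mapped, srcData, srcSize); // srcData/srcSize assumed
    vmaUnmapMemory(allocator, allocation);
    vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
}
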
/** \brief Flushes memory of given allocation.
@@ -3188,8 +3108,7 @@ VK_DEFINE_HANDLE(VmaDefragmentationContext)
/// Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
typedef enum VmaDefragmentationFlagBits {
- VMA_DEFRAGMENTATION_FLAG_INCREMENTAL = 0x1,
- VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+ VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VmaDefragmentationFlagBits;
typedef VkFlags VmaDefragmentationFlags;
@@ -3198,119 +3117,104 @@ typedef VkFlags VmaDefragmentationFlags;
To be used with function vmaDefragmentationBegin().
*/
typedef struct VmaDefragmentationInfo2 {
- /** \brief Reserved for future use. Should be 0.
- */
- VmaDefragmentationFlags flags;
- /** \brief Number of allocations in `pAllocations` array.
- */
- uint32_t allocationCount;
- /** \brief Pointer to array of allocations that can be defragmented.
-
- The array should have `allocationCount` elements.
- The array should not contain nulls.
- Elements in the array should be unique - same allocation cannot occur twice.
- It is safe to pass allocations that are in the lost state - they are ignored.
- All allocations not present in this array are considered non-moveable during this defragmentation.
- */
- VmaAllocation* pAllocations;
- /** \brief Optional, output. Pointer to array that will be filled with information about whether the allocation at a certain index has been changed during defragmentation.
-
- The array should have `allocationCount` elements.
- You can pass null if you are not interested in this information.
- */
- VkBool32* pAllocationsChanged;
- /** \brief Number of pools in `pPools` array.
- */
- uint32_t poolCount;
- /** \brief Either null or pointer to array of pools to be defragmented.
-
- All the allocations in the specified pools can be moved during defragmentation
- and there is no way to check if they were really moved as in `pAllocationsChanged`,
- so you must query all the allocations in all these pools for new `VkDeviceMemory`
- and offset using vmaGetAllocationInfo() if you might need to recreate buffers
- and images bound to them.
-
- The array should have `poolCount` elements.
- The array should not contain nulls.
- Elements in the array should be unique - same pool cannot occur twice.
-
- Using this array is equivalent to specifying all allocations from the pools in `pAllocations`.
- It might be more efficient.
- */
- VmaPool* pPools;
- /** \brief Maximum total number of bytes that can be copied while moving allocations to different places using transfers on CPU side, like `memcpy()`, `memmove()`.
-
- `VK_WHOLE_SIZE` means no limit.
- */
- VkDeviceSize maxCpuBytesToMove;
- /** \brief Maximum number of allocations that can be moved to a different place using transfers on CPU side, like `memcpy()`, `memmove()`.
-
- `UINT32_MAX` means no limit.
- */
- uint32_t maxCpuAllocationsToMove;
- /** \brief Maximum total number of bytes that can be copied while moving allocations to different places using transfers on GPU side, posted to `commandBuffer`.
-
- `VK_WHOLE_SIZE` means no limit.
- */
- VkDeviceSize maxGpuBytesToMove;
- /** \brief Maximum number of allocations that can be moved to a different place using transfers on GPU side, posted to `commandBuffer`.
-
- `UINT32_MAX` means no limit.
- */
- uint32_t maxGpuAllocationsToMove;
- /** \brief Optional. Command buffer where GPU copy commands will be posted.
-
- If not null, it must be a valid command buffer handle that supports Transfer queue type.
- It must be in the recording state and outside of a render pass instance.
- You need to submit it and make sure it finished execution before calling vmaDefragmentationEnd().
-
- Passing null means that only CPU defragmentation will be performed.
- */
- VkCommandBuffer commandBuffer;
+ /** \brief Reserved for future use. Should be 0.
+ */
+ VmaDefragmentationFlags flags;
+ /** \brief Number of allocations in `pAllocations` array.
+ */
+ uint32_t allocationCount;
+ /** \brief Pointer to array of allocations that can be defragmented.
+
+ The array should have `allocationCount` elements.
+ The array should not contain nulls.
+ Elements in the array should be unique - same allocation cannot occur twice.
+ It is safe to pass allocations that are in the lost state - they are ignored.
+ All allocations not present in this array are considered non-moveable during this defragmentation.
+ */
+ VmaAllocation* pAllocations;
+ /** \brief Optional, output. Pointer to array that will be filled with information about whether the allocation at a certain index has been changed during defragmentation.
+
+ The array should have `allocationCount` elements.
+ You can pass null if you are not interested in this information.
+ */
+ VkBool32* pAllocationsChanged;
+ /** \brief Number of pools in `pPools` array.
+ */
+ uint32_t poolCount;
+ /** \brief Either null or pointer to array of pools to be defragmented.
+
+ All the allocations in the specified pools can be moved during defragmentation
+ and there is no way to check if they were really moved as in `pAllocationsChanged`,
+ so you must query all the allocations in all these pools for new `VkDeviceMemory`
+ and offset using vmaGetAllocationInfo() if you might need to recreate buffers
+ and images bound to them.
+
+ The array should have `poolCount` elements.
+ The array should not contain nulls.
+ Elements in the array should be unique - same pool cannot occur twice.
+
+ Using this array is equivalent to specifying all allocations from the pools in `pAllocations`.
+ It might be more efficient.
+ */
+ VmaPool* pPools;
+ /** \brief Maximum total number of bytes that can be copied while moving allocations to different places using transfers on CPU side, like `memcpy()`, `memmove()`.
+
+ `VK_WHOLE_SIZE` means no limit.
+ */
+ VkDeviceSize maxCpuBytesToMove;
+ /** \brief Maximum number of allocations that can be moved to a different place using transfers on CPU side, like `memcpy()`, `memmove()`.
+
+ `UINT32_MAX` means no limit.
+ */
+ uint32_t maxCpuAllocationsToMove;
+ /** \brief Maximum total number of bytes that can be copied while moving allocations to different places using transfers on GPU side, posted to `commandBuffer`.
+
+ `VK_WHOLE_SIZE` means no limit.
+ */
+ VkDeviceSize maxGpuBytesToMove;
+ /** \brief Maximum number of allocations that can be moved to a different place using transfers on GPU side, posted to `commandBuffer`.
+
+ `UINT32_MAX` means no limit.
+ */
+ uint32_t maxGpuAllocationsToMove;
+ /** \brief Optional. Command buffer where GPU copy commands will be posted.
+
+ If not null, it must be a valid command buffer handle that supports Transfer queue type.
+ It must be in the recording state and outside of a render pass instance.
+ You need to submit it and make sure it finished execution before calling vmaDefragmentationEnd().
+
+ Passing null means that only CPU defragmentation will be performed.
+ */
+ VkCommandBuffer commandBuffer;
} VmaDefragmentationInfo2;
-typedef struct VmaDefragmentationPassMoveInfo {
- VmaAllocation allocation;
- VkDeviceMemory memory;
- VkDeviceSize offset;
-} VmaDefragmentationPassMoveInfo;
-
-/** \brief Parameters for incremental defragmentation steps.
-
-To be used with function vmaBeginDefragmentationPass().
-*/
-typedef struct VmaDefragmentationPassInfo {
- uint32_t moveCount;
- VmaDefragmentationPassMoveInfo* pMoves;
-} VmaDefragmentationPassInfo;
-
/** \brief Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
\deprecated This is a part of the old interface. It is recommended to use structure #VmaDefragmentationInfo2 and function vmaDefragmentationBegin() instead.
*/
typedef struct VmaDefragmentationInfo {
- /** \brief Maximum total number of bytes that can be copied while moving allocations to different places.
-
- Default is `VK_WHOLE_SIZE`, which means no limit.
- */
- VkDeviceSize maxBytesToMove;
- /** \brief Maximum number of allocations that can be moved to different place.
-
- Default is `UINT32_MAX`, which means no limit.
- */
- uint32_t maxAllocationsToMove;
+ /** \brief Maximum total number of bytes that can be copied while moving allocations to different places.
+
+ Default is `VK_WHOLE_SIZE`, which means no limit.
+ */
+ VkDeviceSize maxBytesToMove;
+ /** \brief Maximum number of allocations that can be moved to different place.
+
+ Default is `UINT32_MAX`, which means no limit.
+ */
+ uint32_t maxAllocationsToMove;
} VmaDefragmentationInfo;
/** \brief Statistics returned by function vmaDefragment(). */
typedef struct VmaDefragmentationStats {
- /// Total number of bytes that have been copied while moving allocations to different places.
- VkDeviceSize bytesMoved;
- /// Total number of bytes that have been released to the system by freeing empty `VkDeviceMemory` objects.
- VkDeviceSize bytesFreed;
- /// Number of allocations that have been moved to different places.
- uint32_t allocationsMoved;
- /// Number of empty `VkDeviceMemory` objects that have been released to the system.
- uint32_t deviceMemoryBlocksFreed;
+ /// Total number of bytes that have been copied while moving allocations to different places.
+ VkDeviceSize bytesMoved;
+ /// Total number of bytes that have been released to the system by freeing empty `VkDeviceMemory` objects.
+ VkDeviceSize bytesFreed;
+ /// Number of allocations that have been moved to different places.
+ uint32_t allocationsMoved;
+ /// Number of empty `VkDeviceMemory` objects that have been released to the system.
+ uint32_t deviceMemoryBlocksFreed;
} VmaDefragmentationStats;
/** \brief Begins defragmentation process.
@@ -3343,10 +3247,10 @@ For more information and important limitations regarding defragmentation, see do
[Defragmentation](@ref defragmentation).
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
- VmaAllocator allocator,
- const VmaDefragmentationInfo2* pInfo,
- VmaDefragmentationStats* pStats,
- VmaDefragmentationContext *pContext);
+ VmaAllocator allocator,
+ const VmaDefragmentationInfo2* pInfo,
+ VmaDefragmentationStats* pStats,
+ VmaDefragmentationContext *pContext);
/** \brief Ends defragmentation process.
@@ -3354,18 +3258,8 @@ Use this function to finish defragmentation started by vmaDefragmentationBegin()
It is safe to pass `context == null`. The function then does nothing.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
- VmaAllocator allocator,
- VmaDefragmentationContext context);
-
-VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
- VmaAllocator allocator,
- VmaDefragmentationContext context,
- VmaDefragmentationPassInfo* pInfo
-);
-VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
- VmaAllocator allocator,
- VmaDefragmentationContext context
-);
+ VmaAllocator allocator,
+ VmaDefragmentationContext context);
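
A CPU-only pass might look like this (illustrative; `allocs` as VmaAllocation[allocCount] and `allocsChanged` as VkBool32[allocCount] are assumed to exist):

VmaDefragmentationInfo2 defragInfo = {};
defragInfo.allocationCount = allocCount;
defragInfo.pAllocations = allocs;
defragInfo.pAllocationsChanged = allocsChanged; // optional output
defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;   // no limit
defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
defragInfo.commandBuffer = VK_NULL_HANDLE;      // CPU-only defragmentation

VmaDefragmentationContext defragCtx;
VkResult res = vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
// ... once the documented conditions are met:
vmaDefragmentationEnd(allocator, defragCtx);
// Any allocation with allocsChanged[i] == VK_TRUE now lives at a new
// (deviceMemory, offset), so its buffer/image must be recreated and rebound.
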
/** \brief Deprecated. Compacts memory by moving allocations.
@@ -3408,12 +3302,12 @@ you should measure that on your platform.
For more information, see [Defragmentation](@ref defragmentation) chapter.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
- VmaAllocator allocator,
- VmaAllocation* pAllocations,
- size_t allocationCount,
- VkBool32* pAllocationsChanged,
- const VmaDefragmentationInfo *pDefragmentationInfo,
- VmaDefragmentationStats* pDefragmentationStats);
+ VmaAllocator allocator,
+ VmaAllocation* pAllocations,
+ size_t allocationCount,
+ VkBool32* pAllocationsChanged,
+ const VmaDefragmentationInfo *pDefragmentationInfo,
+ VmaDefragmentationStats* pDefragmentationStats);
/** \brief Binds buffer to allocation.
@@ -3428,9 +3322,9 @@ allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from mul
It is recommended to use function vmaCreateBuffer() instead of this one.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkBuffer buffer);
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkBuffer buffer);
/** \brief Binds buffer to allocation with additional parameters.
@@ -3443,11 +3337,11 @@ If `pNext` is not null, #VmaAllocator object must have been created with #VMA_AL
or with VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_1`. Otherwise the call fails.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkDeviceSize allocationLocalOffset,
- VkBuffer buffer,
- const void* pNext);
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer buffer,
+ const void* pNext);
/** \brief Binds image to allocation.
@@ -3462,9 +3356,9 @@ allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from mul
It is recommended to use function vmaCreateImage() instead of this one.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkImage image);
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkImage image);
/** \brief Binds image to allocation with additional parameters.
@@ -3477,11 +3371,11 @@ If `pNext` is not null, #VmaAllocator object must have been created with #VMA_AL
or with VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_1`. Otherwise the call fails.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkDeviceSize allocationLocalOffset,
- VkImage image,
- const void* pNext);
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage image,
+ const void* pNext);
/**
@param[out] pBuffer Buffer that was created.
@@ -3510,12 +3404,12 @@ allocation for this buffer, just like when using
VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
- VmaAllocator allocator,
- const VkBufferCreateInfo* pBufferCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- VkBuffer* pBuffer,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
+ VmaAllocator allocator,
+ const VkBufferCreateInfo* pBufferCreateInfo,
+ const VmaAllocationCreateInfo* pAllocationCreateInfo,
+ VkBuffer* pBuffer,
+ VmaAllocation* pAllocation,
+ VmaAllocationInfo* pAllocationInfo);
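
One common, illustrative use is a persistently mapped staging buffer (`allocator` assumed):

VkBufferCreateInfo stagingBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
stagingBufInfo.size = 65536;
stagingBufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

VmaAllocationCreateInfo stagingAllocCreateInfo = {};
stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

VkBuffer stagingBuf;
VmaAllocation stagingAlloc;
VmaAllocationInfo stagingAllocInfo;
VkResult res = vmaCreateBuffer(allocator, &stagingBufInfo, &stagingAllocCreateInfo,
    &stagingBuf, &stagingAlloc, &stagingAllocInfo);
// stagingAllocInfo.pMappedData is ready to memcpy() into.
// Later: vmaDestroyBuffer(allocator, stagingBuf, stagingAlloc);
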
/** \brief Destroys Vulkan buffer and frees allocated memory.
@@ -3529,18 +3423,18 @@ vmaFreeMemory(allocator, allocation);
It is safe to pass null as buffer and/or allocation.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
- VmaAllocator allocator,
- VkBuffer buffer,
- VmaAllocation allocation);
+ VmaAllocator allocator,
+ VkBuffer buffer,
+ VmaAllocation allocation);
/// Function similar to vmaCreateBuffer().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
- VmaAllocator allocator,
- const VkImageCreateInfo* pImageCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- VkImage* pImage,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo);
+ VmaAllocator allocator,
+ const VkImageCreateInfo* pImageCreateInfo,
+ const VmaAllocationCreateInfo* pAllocationCreateInfo,
+ VkImage* pImage,
+ VmaAllocation* pAllocation,
+ VmaAllocationInfo* pAllocationInfo);
/** \brief Destroys Vulkan image and frees allocated memory.
@@ -3554,9 +3448,9 @@ vmaFreeMemory(allocator, allocation);
It is safe to pass null as image and/or allocation.
*/
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
- VmaAllocator allocator,
- VkImage image,
- VmaAllocation allocation);
+ VmaAllocator allocator,
+ VkImage image,
+ VmaAllocation allocation);
#ifdef __cplusplus
}
@@ -3575,7 +3469,6 @@ VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
#include <cstdint>
#include <cstdlib>
#include <cstring>
-#include <utility>
/*******************************************************************************
CONFIGURATION SECTION
@@ -3588,7 +3481,7 @@ here if you need other then default behavior depending on your environment.
Define this macro to 1 to make the library fetch pointers to Vulkan functions
internally, like:
- vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
+ vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
Define to 0 if you are going to provide your own pointers to Vulkan functions via
VmaAllocatorCreateInfo::pVulkanFunctions.
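
For illustration, with the macro defined to 0 the pointers are supplied like this (only two entries shown; `physicalDevice` and `device` are assumed to exist):

VmaVulkanFunctions vulkanFunctions = {};
vulkanFunctions.vkAllocateMemory = vkAllocateMemory; // or a loader-provided pointer
vulkanFunctions.vkFreeMemory = vkFreeMemory;
// ... fill in the remaining members the same way ...

VmaAllocatorCreateInfo allocatorCreateInfo = {};
allocatorCreateInfo.physicalDevice = physicalDevice;
allocatorCreateInfo.device = device;
allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
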
@@ -3613,17 +3506,17 @@ the containers.
#endif
#ifndef VMA_USE_STL_SHARED_MUTEX
- // Compiler conforms to C++17.
- #if __cplusplus >= 201703L
- #define VMA_USE_STL_SHARED_MUTEX 1
- // Visual Studio defines __cplusplus properly only when passed the additional parameter /Zc:__cplusplus.
- // Otherwise it's always 199711L, even though shared_mutex has worked since Visual Studio 2015 Update 2.
- // See: https://blogs.msdn.microsoft.com/vcblog/2018/04/09/msvc-now-correctly-reports-__cplusplus/
- #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
- #define VMA_USE_STL_SHARED_MUTEX 1
- #else
- #define VMA_USE_STL_SHARED_MUTEX 0
- #endif
+ // Compiler conforms to C++17.
+ #if __cplusplus >= 201703L
+ #define VMA_USE_STL_SHARED_MUTEX 1
+ // Visual Studio defines __cplusplus properly only when passed the additional parameter /Zc:__cplusplus.
+ // Otherwise it's always 199711L, even though shared_mutex has worked since Visual Studio 2015 Update 2.
+ // See: https://blogs.msdn.microsoft.com/vcblog/2018/04/09/msvc-now-correctly-reports-__cplusplus/
+ #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
+ #define VMA_USE_STL_SHARED_MUTEX 1
+ #else
+ #define VMA_USE_STL_SHARED_MUTEX 0
+ #endif
#endif
/*
@@ -3659,28 +3552,28 @@ remove them if not needed.
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
- // alignment must be >= sizeof(void*)
- if(alignment < sizeof(void*))
- {
- alignment = sizeof(void*);
- }
+ // alignment must be >= sizeof(void*)
+ if(alignment < sizeof(void*))
+ {
+ alignment = sizeof(void*);
+ }
- return memalign(alignment, size);
+ return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
- // alignment must be >= sizeof(void*)
- if(alignment < sizeof(void*))
- {
- alignment = sizeof(void*);
- }
+ // alignment must be >= sizeof(void*)
+ if(alignment < sizeof(void*))
+ {
+ alignment = sizeof(void*);
+ }
- void *pointer;
- if(posix_memalign(&pointer, alignment, size) == 0)
- return pointer;
- return VMA_NULL;
+ void *pointer;
+ if(posix_memalign(&pointer, alignment, size) == 0)
+ return pointer;
+ return VMA_NULL;
}
#endif
@@ -3691,20 +3584,20 @@ void *aligned_alloc(size_t alignment, size_t size)
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
- #ifdef NDEBUG
- #define VMA_ASSERT(expr)
+ #ifdef _DEBUG
+ #define VMA_ASSERT(expr) assert(expr)
#else
- #define VMA_ASSERT(expr) assert(expr)
+ #define VMA_ASSERT(expr)
#endif
#endif
// Assert that will be called very often, like inside data structures, e.g. operator[].
// Making it non-empty can make the program slow.
#ifndef VMA_HEAVY_ASSERT
- #ifdef NDEBUG
- #define VMA_HEAVY_ASSERT(expr)
+ #ifdef _DEBUG
+ #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
#else
- #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
+ #define VMA_HEAVY_ASSERT(expr)
#endif
#endif
@@ -3714,17 +3607,17 @@ void *aligned_alloc(size_t alignment, size_t size)
#ifndef VMA_SYSTEM_ALIGNED_MALLOC
#if defined(_WIN32)
- #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
+ #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
#else
- #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
+ #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
#endif
#endif
#ifndef VMA_SYSTEM_FREE
#if defined(_WIN32)
- #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
+ #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
#else
- #define VMA_SYSTEM_FREE(ptr) free(ptr)
+ #define VMA_SYSTEM_FREE(ptr) free(ptr)
#endif
#endif
@@ -3748,162 +3641,155 @@ void *aligned_alloc(size_t alignment, size_t size)
#define VMA_DEBUG_LOG(format, ...)
/*
#define VMA_DEBUG_LOG(format, ...) do { \
- printf(format, __VA_ARGS__); \
- printf("\n"); \
+ printf(format, __VA_ARGS__); \
+ printf("\n"); \
} while(false)
*/
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
- static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
- {
- snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
- }
- static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
- {
- snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
- }
- static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
- {
- snprintf(outStr, strLen, "%p", ptr);
- }
+ static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
+ {
+ snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
+ }
+ static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
+ {
+ snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
+ }
+ static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
+ {
+ snprintf(outStr, strLen, "%p", ptr);
+ }
#endif
#ifndef VMA_MUTEX
- class VmaMutex
- {
- public:
- void Lock() { m_Mutex.lock(); }
- void Unlock() { m_Mutex.unlock(); }
- bool TryLock() { return m_Mutex.try_lock(); }
- private:
- std::mutex m_Mutex;
- };
- #define VMA_MUTEX VmaMutex
+ class VmaMutex
+ {
+ public:
+ void Lock() { m_Mutex.lock(); }
+ void Unlock() { m_Mutex.unlock(); }
+ private:
+ std::mutex m_Mutex;
+ };
+ #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
- #if VMA_USE_STL_SHARED_MUTEX
- // Use std::shared_mutex from C++17.
- #include <shared_mutex>
- class VmaRWMutex
- {
- public:
- void LockRead() { m_Mutex.lock_shared(); }
- void UnlockRead() { m_Mutex.unlock_shared(); }
- bool TryLockRead() { return m_Mutex.try_lock_shared(); }
- void LockWrite() { m_Mutex.lock(); }
- void UnlockWrite() { m_Mutex.unlock(); }
- bool TryLockWrite() { return m_Mutex.try_lock(); }
- private:
- std::shared_mutex m_Mutex;
- };
- #define VMA_RW_MUTEX VmaRWMutex
- #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
- // Use SRWLOCK from WinAPI.
- // Minimum supported client = Windows Vista, server = Windows Server 2008.
- class VmaRWMutex
- {
- public:
- VmaRWMutex() { InitializeSRWLock(&m_Lock); }
- void LockRead() { AcquireSRWLockShared(&m_Lock); }
- void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
- bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
- void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
- void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
- bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
- private:
- SRWLOCK m_Lock;
- };
- #define VMA_RW_MUTEX VmaRWMutex
- #else
- // Less efficient fallback: Use normal mutex.
- class VmaRWMutex
- {
- public:
- void LockRead() { m_Mutex.Lock(); }
- void UnlockRead() { m_Mutex.Unlock(); }
- bool TryLockRead() { return m_Mutex.TryLock(); }
- void LockWrite() { m_Mutex.Lock(); }
- void UnlockWrite() { m_Mutex.Unlock(); }
- bool TryLockWrite() { return m_Mutex.TryLock(); }
- private:
- VMA_MUTEX m_Mutex;
- };
- #define VMA_RW_MUTEX VmaRWMutex
- #endif // #if VMA_USE_STL_SHARED_MUTEX
+ #if VMA_USE_STL_SHARED_MUTEX
+ // Use std::shared_mutex from C++17.
+ #include <shared_mutex>
+ class VmaRWMutex
+ {
+ public:
+ void LockRead() { m_Mutex.lock_shared(); }
+ void UnlockRead() { m_Mutex.unlock_shared(); }
+ void LockWrite() { m_Mutex.lock(); }
+ void UnlockWrite() { m_Mutex.unlock(); }
+ private:
+ std::shared_mutex m_Mutex;
+ };
+ #define VMA_RW_MUTEX VmaRWMutex
+ #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
+ // Use SRWLOCK from WinAPI.
+ // Minimum supported client = Windows Vista, server = Windows Server 2008.
+ class VmaRWMutex
+ {
+ public:
+ VmaRWMutex() { InitializeSRWLock(&m_Lock); }
+ void LockRead() { AcquireSRWLockShared(&m_Lock); }
+ void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
+ void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
+ void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
+ private:
+ SRWLOCK m_Lock;
+ };
+ #define VMA_RW_MUTEX VmaRWMutex
+ #else
+ // Less efficient fallback: Use normal mutex.
+ class VmaRWMutex
+ {
+ public:
+ void LockRead() { m_Mutex.Lock(); }
+ void UnlockRead() { m_Mutex.Unlock(); }
+ void LockWrite() { m_Mutex.Lock(); }
+ void UnlockWrite() { m_Mutex.Unlock(); }
+ private:
+ VMA_MUTEX m_Mutex;
+ };
+ #define VMA_RW_MUTEX VmaRWMutex
+ #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
/*
If providing your own implementation, you need to implement a subset of std::atomic.
*/
#ifndef VMA_ATOMIC_UINT32
- #include <atomic>
- #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
+ #include <atomic>
+ #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif
#ifndef VMA_ATOMIC_UINT64
- #include <atomic>
- #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
+ #include <atomic>
+ #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
- /**
- Every allocation will have its own memory block.
- Define to 1 for debugging purposes only.
- */
- #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
+ /**
+ Every allocation will have its own memory block.
+ Define to 1 for debugging purposes only.
+ */
+ #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif
#ifndef VMA_DEBUG_ALIGNMENT
- /**
- Minimum alignment of all allocations, in bytes.
- Set to more than 1 for debugging purposes only. Must be power of two.
- */
- #define VMA_DEBUG_ALIGNMENT (1)
+ /**
+ Minimum alignment of all allocations, in bytes.
+ Set to more than 1 for debugging purposes only. Must be power of two.
+ */
+ #define VMA_DEBUG_ALIGNMENT (1)
#endif
#ifndef VMA_DEBUG_MARGIN
- /**
- Minimum margin before and after every allocation, in bytes.
- Set nonzero for debugging purposes only.
- */
- #define VMA_DEBUG_MARGIN (0)
+ /**
+ Minimum margin before and after every allocation, in bytes.
+ Set nonzero for debugging purposes only.
+ */
+ #define VMA_DEBUG_MARGIN (0)
#endif
#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
- /**
- Define this macro to 1 to automatically fill new allocations and destroyed
- allocations with some bit pattern.
- */
- #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
+ /**
+ Define this macro to 1 to automatically fill new allocations and destroyed
+ allocations with some bit pattern.
+ */
+ #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif
#ifndef VMA_DEBUG_DETECT_CORRUPTION
- /**
- Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
- enable writing magic value to the margin before and after every allocation and
- validating it, so that memory corruptions (out-of-bounds writes) are detected.
- */
- #define VMA_DEBUG_DETECT_CORRUPTION (0)
+ /**
+ Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
+ enable writing magic value to the margin before and after every allocation and
+ validating it, so that memory corruptions (out-of-bounds writes) are detected.
+ */
+ #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif
#ifndef VMA_DEBUG_GLOBAL_MUTEX
- /**
- Set this to 1 for debugging purposes only, to enable single mutex protecting all
- entry calls to the library. Can be useful for debugging multithreading issues.
- */
- #define VMA_DEBUG_GLOBAL_MUTEX (0)
+ /**
+ Set this to 1 for debugging purposes only, to enable single mutex protecting all
+ entry calls to the library. Can be useful for debugging multithreading issues.
+ */
+ #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif
#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
- /**
- Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
- Set to more than 1 for debugging purposes only. Must be power of two.
- */
- #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
+ /**
+ Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
+ Set to more than 1 for debugging purposes only. Must be power of two.
+ */
+ #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif
#ifndef VMA_SMALL_HEAP_MAX_SIZE
@@ -3917,10 +3803,10 @@ If providing your own implementation, you need to implement a subset of std::ato
#endif
#ifndef VMA_CLASS_NO_COPY
- #define VMA_CLASS_NO_COPY(className) \
- private: \
- className(const className&) = delete; \
- className& operator=(const className&) = delete;
+ #define VMA_CLASS_NO_COPY(className) \
+ private: \
+ className(const className&) = delete; \
+ className& operator=(const className&) = delete;
#endif
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
@@ -3935,16 +3821,10 @@ static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
END OF CONFIGURATION
*/
-// # Copy of some Vulkan definitions so we don't need to check their existence just to handle few constants.
-
-static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
-static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
-
-
static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
- VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
+ VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
@@ -3969,7 +3849,7 @@ static inline T VmaAlignUp(T val, T align)
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
- return val / align * align;
+ return val / align * align;
}
// Division with mathematical rounding to nearest number.
@@ -3987,78 +3867,78 @@ For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
- return (x & (x-1)) == 0;
+ return (x & (x-1)) == 0;
}
// Returns the smallest power of 2 greater than or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
v--;
- v |= v >> 1;
- v |= v >> 2;
- v |= v >> 4;
- v |= v >> 8;
- v |= v >> 16;
- v++;
- return v;
+ v |= v >> 1;
+ v |= v >> 2;
+ v |= v >> 4;
+ v |= v >> 8;
+ v |= v >> 16;
+ v++;
+ return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
v--;
- v |= v >> 1;
- v |= v >> 2;
- v |= v >> 4;
- v |= v >> 8;
- v |= v >> 16;
- v |= v >> 32;
- v++;
- return v;
+ v |= v >> 1;
+ v |= v >> 2;
+ v |= v >> 4;
+ v |= v >> 8;
+ v |= v >> 16;
+ v |= v >> 32;
+ v++;
+ return v;
}
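// Worked example (illustrative): for v == 17 (10001b) the decrement gives 16
// (10000b), the OR cascade smears the top set bit into all lower bits (11111b
// == 31), and the final increment yields 32, the smallest power of 2 >= 17.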
// Returns the largest power of 2 less than or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
- v |= v >> 1;
- v |= v >> 2;
- v |= v >> 4;
- v |= v >> 8;
- v |= v >> 16;
- v = v ^ (v >> 1);
- return v;
+ v |= v >> 1;
+ v |= v >> 2;
+ v |= v >> 4;
+ v |= v >> 8;
+ v |= v >> 16;
+ v = v ^ (v >> 1);
+ return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
- v |= v >> 1;
- v |= v >> 2;
- v |= v >> 4;
- v |= v >> 8;
- v |= v >> 16;
- v |= v >> 32;
- v = v ^ (v >> 1);
- return v;
+ v |= v >> 1;
+ v |= v >> 2;
+ v |= v >> 4;
+ v |= v >> 8;
+ v |= v >> 16;
+ v |= v >> 32;
+ v = v ^ (v >> 1);
+ return v;
}
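// Worked example (illustrative): for v == 17 the OR cascade fills all bits
// below the top one (11111b == 31), and v ^ (v >> 1) == 31 ^ 15 == 16,
// the largest power of 2 <= 17.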
static inline bool VmaStrIsEmpty(const char* pStr)
{
- return pStr == VMA_NULL || *pStr == '\0';
+ return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED
static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
- switch(algorithm)
- {
- case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
- return "Linear";
- case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
- return "Buddy";
- case 0:
- return "Default";
- default:
- VMA_ASSERT(0);
- return "";
- }
+ switch(algorithm)
+ {
+ case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
+ return "Linear";
+ case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
+ return "Buddy";
+ case 0:
+ return "Default";
+ default:
+ VMA_ASSERT(0);
+ return "";
+ }
}
#endif // #if VMA_STATS_STRING_ENABLED
@@ -4068,35 +3948,35 @@ static const char* VmaAlgorithmToStr(uint32_t algorithm)
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
- Iterator centerValue = end; --centerValue;
- Iterator insertIndex = beg;
- for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
- {
- if(cmp(*memTypeIndex, *centerValue))
- {
- if(insertIndex != memTypeIndex)
- {
- VMA_SWAP(*memTypeIndex, *insertIndex);
- }
- ++insertIndex;
- }
- }
- if(insertIndex != centerValue)
- {
- VMA_SWAP(*insertIndex, *centerValue);
- }
- return insertIndex;
+ Iterator centerValue = end; --centerValue;
+ Iterator insertIndex = beg;
+ for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
+ {
+ if(cmp(*memTypeIndex, *centerValue))
+ {
+ if(insertIndex != memTypeIndex)
+ {
+ VMA_SWAP(*memTypeIndex, *insertIndex);
+ }
+ ++insertIndex;
+ }
+ }
+ if(insertIndex != centerValue)
+ {
+ VMA_SWAP(*insertIndex, *centerValue);
+ }
+ return insertIndex;
}
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
- if(beg < end)
- {
- Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
- VmaQuickSort<Iterator, Compare>(beg, it, cmp);
- VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
- }
+ if(beg < end)
+ {
+ Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
+ VmaQuickSort<Iterator, Compare>(beg, it, cmp);
+ VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
+ }
}
#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
@@ -4111,28 +3991,28 @@ Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulk
chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
- VkDeviceSize resourceAOffset,
- VkDeviceSize resourceASize,
- VkDeviceSize resourceBOffset,
- VkDeviceSize pageSize)
+ VkDeviceSize resourceAOffset,
+ VkDeviceSize resourceASize,
+ VkDeviceSize resourceBOffset,
+ VkDeviceSize pageSize)
{
- VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
- VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
- VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
- VkDeviceSize resourceBStart = resourceBOffset;
- VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
- return resourceAEndPage == resourceBStartPage;
+ VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
+ VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
+ VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
+ VkDeviceSize resourceBStart = resourceBOffset;
+ VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
+ return resourceAEndPage == resourceBStartPage;
}
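// Worked example (illustrative): with pageSize == 4096, a resource spanning
// [0, 4000) ends on page 0 while one starting at offset 4096 begins on page 1,
// so the function returns false; had the second one started at offset 4064,
// both would fall on page 0 and it would return true.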
enum VmaSuballocationType
{
- VMA_SUBALLOCATION_TYPE_FREE = 0,
- VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
- VMA_SUBALLOCATION_TYPE_BUFFER = 2,
- VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
- VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
- VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
- VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+ VMA_SUBALLOCATION_TYPE_FREE = 0,
+ VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
+ VMA_SUBALLOCATION_TYPE_BUFFER = 2,
+ VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
+ VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
+ VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
+ VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
@@ -4142,68 +4022,68 @@ or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
- VmaSuballocationType suballocType1,
- VmaSuballocationType suballocType2)
-{
- if(suballocType1 > suballocType2)
- {
- VMA_SWAP(suballocType1, suballocType2);
- }
-
- switch(suballocType1)
- {
- case VMA_SUBALLOCATION_TYPE_FREE:
- return false;
- case VMA_SUBALLOCATION_TYPE_UNKNOWN:
- return true;
- case VMA_SUBALLOCATION_TYPE_BUFFER:
- return
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
- return
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
- return
- suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
- case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
- return false;
- default:
- VMA_ASSERT(0);
- return true;
- }
+ VmaSuballocationType suballocType1,
+ VmaSuballocationType suballocType2)
+{
+ if(suballocType1 > suballocType2)
+ {
+ VMA_SWAP(suballocType1, suballocType2);
+ }
+
+ switch(suballocType1)
+ {
+ case VMA_SUBALLOCATION_TYPE_FREE:
+ return false;
+ case VMA_SUBALLOCATION_TYPE_UNKNOWN:
+ return true;
+ case VMA_SUBALLOCATION_TYPE_BUFFER:
+ return
+ suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+ suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+ case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
+ return
+ suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+ suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
+ suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+ case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
+ return
+ suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
+ case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
+ return false;
+ default:
+ VMA_ASSERT(0);
+ return true;
+ }
}
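A couple of illustrative checks of the conflict table above; the initial VMA_SWAP makes the relation symmetric, so argument order does not matter:

// Sketch only - these asserts are not part of the library.
// A buffer next to an optimal-tiling image conflicts:
VMA_ASSERT(VmaIsBufferImageGranularityConflict(
    VMA_SUBALLOCATION_TYPE_BUFFER, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL));
// A buffer next to a linear image does not (order swapped on purpose):
VMA_ASSERT(!VmaIsBufferImageGranularityConflict(
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR, VMA_SUBALLOCATION_TYPE_BUFFER));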
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
- uint32_t* pDst = (uint32_t*)((char*)pData + offset);
- const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
- for(size_t i = 0; i < numberCount; ++i, ++pDst)
- {
- *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
- }
+ uint32_t* pDst = (uint32_t*)((char*)pData + offset);
+ const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
+ for(size_t i = 0; i < numberCount; ++i, ++pDst)
+ {
+ *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
+ }
#else
- // no-op
+ // no-op
#endif
}
static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
- const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
- const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
- for(size_t i = 0; i < numberCount; ++i, ++pSrc)
- {
- if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
- {
- return false;
- }
- }
+ const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
+ const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
+ for(size_t i = 0; i < numberCount; ++i, ++pSrc)
+ {
+ if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
+ {
+ return false;
+ }
+ }
#endif
- return true;
+ return true;
}
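When VMA_DEBUG_MARGIN and VMA_DEBUG_DETECT_CORRUPTION are both enabled, allocations are padded with margins filled with this magic value. A minimal sketch of checking both margins of one allocation, assuming an allocation placed at allocOffset of allocSize bytes with a margin on each side (the function and its parameters are hypothetical, not library state):

static bool ValidateBothMargins(const void* pBlockData,
    VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    // Margin immediately before the allocation and immediately after it.
    return VmaValidateMagicValue(pBlockData, allocOffset - VMA_DEBUG_MARGIN) &&
        VmaValidateMagicValue(pBlockData, allocOffset + allocSize);
}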
/*
@@ -4212,57 +4092,57 @@ during GPU memory defragmentation.
*/
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
- memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
- outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
- outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
- outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
+ memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
+ outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+ outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+ outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
- VMA_CLASS_NO_COPY(VmaMutexLock)
+ VMA_CLASS_NO_COPY(VmaMutexLock)
public:
- VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
- m_pMutex(useMutex ? &mutex : VMA_NULL)
- { if(m_pMutex) { m_pMutex->Lock(); } }
- ~VmaMutexLock()
- { if(m_pMutex) { m_pMutex->Unlock(); } }
+ VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
+ m_pMutex(useMutex ? &mutex : VMA_NULL)
+ { if(m_pMutex) { m_pMutex->Lock(); } }
+ ~VmaMutexLock()
+ { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
- VMA_MUTEX* m_pMutex;
+ VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
- VMA_CLASS_NO_COPY(VmaMutexLockRead)
+ VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
- VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
- m_pMutex(useMutex ? &mutex : VMA_NULL)
- { if(m_pMutex) { m_pMutex->LockRead(); } }
- ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
+ VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
+ m_pMutex(useMutex ? &mutex : VMA_NULL)
+ { if(m_pMutex) { m_pMutex->LockRead(); } }
+ ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
- VMA_RW_MUTEX* m_pMutex;
+ VMA_RW_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
- VMA_CLASS_NO_COPY(VmaMutexLockWrite)
+ VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
- VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
- m_pMutex(useMutex ? &mutex : VMA_NULL)
- { if(m_pMutex) { m_pMutex->LockWrite(); } }
- ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
+ VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
+ m_pMutex(useMutex ? &mutex : VMA_NULL)
+ { if(m_pMutex) { m_pMutex->LockWrite(); } }
+ ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
- VMA_RW_MUTEX* m_pMutex;
+ VMA_RW_MUTEX* m_pMutex;
};
#if VMA_DEBUG_GLOBAL_MUTEX
- static VMA_MUTEX gDebugGlobalMutex;
- #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
+ static VMA_MUTEX gDebugGlobalMutex;
+ #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
- #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif
// Minimum size of a free suballocation to register it in the free suballocation collection.
@@ -4280,33 +4160,33 @@ new element with value (key) should be inserted.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
- size_t down = 0, up = (end - beg);
- while(down < up)
- {
- const size_t mid = (down + up) / 2;
- if(cmp(*(beg+mid), key))
- {
- down = mid + 1;
- }
- else
- {
- up = mid;
- }
- }
- return beg + down;
+ size_t down = 0, up = (end - beg);
+ while(down < up)
+ {
+ const size_t mid = (down + up) / 2;
+ if(cmp(*(beg+mid), key))
+ {
+ down = mid + 1;
+ }
+ else
+ {
+ up = mid;
+ }
+ }
+ return beg + down;
}
template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
- IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
- beg, end, value, cmp);
- if(it == end ||
- (!cmp(*it, value) && !cmp(value, *it)))
- {
- return it;
- }
- return end;
+ IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
+ beg, end, value, cmp);
+ if(it == end ||
+ (!cmp(*it, value) && !cmp(value, *it)))
+ {
+ return it;
+ }
+ return end;
}
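An illustrative use of the two helpers over a plain sorted array (CmpLessInt and the data are hypothetical):

struct CmpLessInt { bool operator()(int a, int b) const { return a < b; } };
static void BinarySearchExample()
{
    int sorted[] = { 1, 3, 3, 7 };
    int* const beg = sorted;
    int* const end = sorted + 4;
    // Behaves like std::lower_bound: first element not less than the key.
    int* const first = VmaBinaryFindFirstNotLess(beg, end, 3, CmpLessInt());
    VMA_ASSERT(first == beg + 1);
    // Exact-match lookup: returns end when the key is absent.
    int* const found = VmaBinaryFindSorted(beg, end, 5, CmpLessInt());
    VMA_ASSERT(found == end);
    (void)first; (void)found;
}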
/*
@@ -4317,22 +4197,22 @@ T must be pointer type, e.g. VmaAllocation, VmaPool.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
- for(uint32_t i = 0; i < count; ++i)
- {
- const T iPtr = arr[i];
- if(iPtr == VMA_NULL)
- {
- return false;
- }
- for(uint32_t j = i + 1; j < count; ++j)
- {
- if(iPtr == arr[j])
- {
- return false;
- }
- }
- }
- return true;
+ for(uint32_t i = 0; i < count; ++i)
+ {
+ const T iPtr = arr[i];
+ if(iPtr == VMA_NULL)
+ {
+ return false;
+ }
+ for(uint32_t j = i + 1; j < count; ++j)
+ {
+ if(iPtr == arr[j])
+ {
+ return false;
+ }
+ }
+ }
+ return true;
}
////////////////////////////////////////////////////////////////////////////////
@@ -4340,44 +4220,44 @@ static bool VmaValidatePointerArray(uint32_t count, const T* arr)
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
- if((pAllocationCallbacks != VMA_NULL) &&
- (pAllocationCallbacks->pfnAllocation != VMA_NULL))
- {
- return (*pAllocationCallbacks->pfnAllocation)(
- pAllocationCallbacks->pUserData,
- size,
- alignment,
- VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
- }
- else
- {
- return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
- }
+ if((pAllocationCallbacks != VMA_NULL) &&
+ (pAllocationCallbacks->pfnAllocation != VMA_NULL))
+ {
+ return (*pAllocationCallbacks->pfnAllocation)(
+ pAllocationCallbacks->pUserData,
+ size,
+ alignment,
+ VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+ }
+ else
+ {
+ return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
+ }
}
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
- if((pAllocationCallbacks != VMA_NULL) &&
- (pAllocationCallbacks->pfnFree != VMA_NULL))
- {
- (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
- }
- else
- {
- VMA_SYSTEM_FREE(ptr);
- }
+ if((pAllocationCallbacks != VMA_NULL) &&
+ (pAllocationCallbacks->pfnFree != VMA_NULL))
+ {
+ (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
+ }
+ else
+ {
+ VMA_SYSTEM_FREE(ptr);
+ }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
- return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
+ return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}
template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
- return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
+ return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}
#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
@@ -4387,45 +4267,45 @@ static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, si
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
- ptr->~T();
- VmaFree(pAllocationCallbacks, ptr);
+ ptr->~T();
+ VmaFree(pAllocationCallbacks, ptr);
}
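A minimal sketch of the allocation path above: vma_new routes through VmaMalloc, so user callbacks are honored when present and VMA_SYSTEM_ALIGNED_MALLOC is the fallback (Payload and pAllocCbs are illustrative names, not part of the library):

struct Payload { uint32_t frameIndex; };
static void CallbacksExample(const VkAllocationCallbacks* pAllocCbs)
{
    Payload* const p = vma_new(pAllocCbs, Payload); // placement-new on VmaMalloc'd storage
    p->frameIndex = 0;
    vma_delete(pAllocCbs, p); // runs ~Payload(), then VmaFree
}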
template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
- if(ptr != VMA_NULL)
- {
- for(size_t i = count; i--; )
- {
- ptr[i].~T();
- }
- VmaFree(pAllocationCallbacks, ptr);
- }
+ if(ptr != VMA_NULL)
+ {
+ for(size_t i = count; i--; )
+ {
+ ptr[i].~T();
+ }
+ VmaFree(pAllocationCallbacks, ptr);
+ }
}
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
- if(srcStr != VMA_NULL)
- {
- const size_t len = strlen(srcStr);
- char* const result = vma_new_array(allocs, char, len + 1);
- memcpy(result, srcStr, len + 1);
- return result;
- }
- else
- {
- return VMA_NULL;
- }
+ if(srcStr != VMA_NULL)
+ {
+ const size_t len = strlen(srcStr);
+ char* const result = vma_new_array(allocs, char, len + 1);
+ memcpy(result, srcStr, len + 1);
+ return result;
+ }
+ else
+ {
+ return VMA_NULL;
+ }
}
static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
- if(str != VMA_NULL)
- {
- const size_t len = strlen(str);
- vma_delete_array(allocs, str, len + 1);
- }
+ if(str != VMA_NULL)
+ {
+ const size_t len = strlen(str);
+ vma_delete_array(allocs, str, len + 1);
+ }
}
// STL-compatible allocator.
@@ -4433,27 +4313,27 @@ template<typename T>
class VmaStlAllocator
{
public:
- const VkAllocationCallbacks* const m_pCallbacks;
- typedef T value_type;
-
- VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
- template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
-
- T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
- void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
-
- template<typename U>
- bool operator==(const VmaStlAllocator<U>& rhs) const
- {
- return m_pCallbacks == rhs.m_pCallbacks;
- }
- template<typename U>
- bool operator!=(const VmaStlAllocator<U>& rhs) const
- {
- return m_pCallbacks != rhs.m_pCallbacks;
- }
-
- VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
+ const VkAllocationCallbacks* const m_pCallbacks;
+ typedef T value_type;
+
+ VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
+ template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
+
+ T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
+ void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }
+
+ template<typename U>
+ bool operator==(const VmaStlAllocator<U>& rhs) const
+ {
+ return m_pCallbacks == rhs.m_pCallbacks;
+ }
+ template<typename U>
+ bool operator!=(const VmaStlAllocator<U>& rhs) const
+ {
+ return m_pCallbacks != rhs.m_pCallbacks;
+ }
+
+ VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
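A short sketch of the allocator in isolation (pCbs is hypothetical); element storage goes through VmaMalloc/VmaFree and therefore honors the user's VkAllocationCallbacks:

static void StlAllocatorExample(const VkAllocationCallbacks* pCbs)
{
    VmaStlAllocator<uint32_t> a(pCbs);
    uint32_t* const p = a.allocate(4); // room for 4 elements via VmaAllocateArray
    p[0] = 7u;
    a.deallocate(p, 4); // the size argument is unused by VmaFree
}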
#if VMA_USE_STL_VECTOR
@@ -4463,13 +4343,13 @@ public:
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
- vec.insert(vec.begin() + index, item);
+ vec.insert(vec.begin() + index, item);
}
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
- vec.erase(vec.begin() + index);
+ vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR
@@ -4481,220 +4361,220 @@ template<typename T, typename AllocatorT>
class VmaVector
{
public:
- typedef T value_type;
-
- VmaVector(const AllocatorT& allocator) :
- m_Allocator(allocator),
- m_pArray(VMA_NULL),
- m_Count(0),
- m_Capacity(0)
- {
- }
-
- VmaVector(size_t count, const AllocatorT& allocator) :
- m_Allocator(allocator),
- m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
- m_Count(count),
- m_Capacity(count)
- {
- }
-
- // This version of the constructor is here for compatibility with pre-C++14 std::vector.
- // value is unused.
- VmaVector(size_t count, const T& value, const AllocatorT& allocator)
- : VmaVector(count, allocator) {}
-
- VmaVector(const VmaVector<T, AllocatorT>& src) :
- m_Allocator(src.m_Allocator),
- m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
- m_Count(src.m_Count),
- m_Capacity(src.m_Count)
- {
- if(m_Count != 0)
- {
- memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
- }
- }
-
- ~VmaVector()
- {
- VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- }
-
- VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
- {
- if(&rhs != this)
- {
- resize(rhs.m_Count);
- if(m_Count != 0)
- {
- memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
- }
- }
- return *this;
- }
-
- bool empty() const { return m_Count == 0; }
- size_t size() const { return m_Count; }
- T* data() { return m_pArray; }
- const T* data() const { return m_pArray; }
-
- T& operator[](size_t index)
- {
- VMA_HEAVY_ASSERT(index < m_Count);
- return m_pArray[index];
- }
- const T& operator[](size_t index) const
- {
- VMA_HEAVY_ASSERT(index < m_Count);
- return m_pArray[index];
- }
-
- T& front()
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- return m_pArray[0];
- }
- const T& front() const
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- return m_pArray[0];
- }
- T& back()
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- return m_pArray[m_Count - 1];
- }
- const T& back() const
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- return m_pArray[m_Count - 1];
- }
-
- void reserve(size_t newCapacity, bool freeMemory = false)
- {
- newCapacity = VMA_MAX(newCapacity, m_Count);
-
- if((newCapacity < m_Capacity) && !freeMemory)
- {
- newCapacity = m_Capacity;
- }
-
- if(newCapacity != m_Capacity)
- {
- T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
- if(m_Count != 0)
- {
- memcpy(newArray, m_pArray, m_Count * sizeof(T));
- }
- VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- m_Capacity = newCapacity;
- m_pArray = newArray;
- }
- }
-
- void resize(size_t newCount, bool freeMemory = false)
- {
- size_t newCapacity = m_Capacity;
- if(newCount > m_Capacity)
- {
- newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
- }
- else if(freeMemory)
- {
- newCapacity = newCount;
- }
-
- if(newCapacity != m_Capacity)
- {
- T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
- const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
- if(elementsToCopy != 0)
- {
- memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
- }
- VmaFree(m_Allocator.m_pCallbacks, m_pArray);
- m_Capacity = newCapacity;
- m_pArray = newArray;
- }
-
- m_Count = newCount;
- }
-
- void clear(bool freeMemory = false)
- {
- resize(0, freeMemory);
- }
-
- void insert(size_t index, const T& src)
- {
- VMA_HEAVY_ASSERT(index <= m_Count);
- const size_t oldCount = size();
- resize(oldCount + 1);
- if(index < oldCount)
- {
- memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
- }
- m_pArray[index] = src;
- }
-
- void remove(size_t index)
- {
- VMA_HEAVY_ASSERT(index < m_Count);
- const size_t oldCount = size();
- if(index < oldCount - 1)
- {
- memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
- }
- resize(oldCount - 1);
- }
-
- void push_back(const T& src)
- {
- const size_t newIndex = size();
- resize(newIndex + 1);
- m_pArray[newIndex] = src;
- }
-
- void pop_back()
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- resize(size() - 1);
- }
-
- void push_front(const T& src)
- {
- insert(0, src);
- }
-
- void pop_front()
- {
- VMA_HEAVY_ASSERT(m_Count > 0);
- remove(0);
- }
-
- typedef T* iterator;
-
- iterator begin() { return m_pArray; }
- iterator end() { return m_pArray + m_Count; }
+ typedef T value_type;
+
+ VmaVector(const AllocatorT& allocator) :
+ m_Allocator(allocator),
+ m_pArray(VMA_NULL),
+ m_Count(0),
+ m_Capacity(0)
+ {
+ }
+
+ VmaVector(size_t count, const AllocatorT& allocator) :
+ m_Allocator(allocator),
+ m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
+ m_Count(count),
+ m_Capacity(count)
+ {
+ }
+
+ // This version of the constructor is here for compatibility with pre-C++14 std::vector.
+ // value is unused.
+ VmaVector(size_t count, const T& value, const AllocatorT& allocator)
+ : VmaVector(count, allocator) {}
+
+ VmaVector(const VmaVector<T, AllocatorT>& src) :
+ m_Allocator(src.m_Allocator),
+ m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
+ m_Count(src.m_Count),
+ m_Capacity(src.m_Count)
+ {
+ if(m_Count != 0)
+ {
+ memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
+ }
+ }
+
+ ~VmaVector()
+ {
+ VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+ }
+
+ VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
+ {
+ if(&rhs != this)
+ {
+ resize(rhs.m_Count);
+ if(m_Count != 0)
+ {
+ memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
+ }
+ }
+ return *this;
+ }
+
+ bool empty() const { return m_Count == 0; }
+ size_t size() const { return m_Count; }
+ T* data() { return m_pArray; }
+ const T* data() const { return m_pArray; }
+
+ T& operator[](size_t index)
+ {
+ VMA_HEAVY_ASSERT(index < m_Count);
+ return m_pArray[index];
+ }
+ const T& operator[](size_t index) const
+ {
+ VMA_HEAVY_ASSERT(index < m_Count);
+ return m_pArray[index];
+ }
+
+ T& front()
+ {
+ VMA_HEAVY_ASSERT(m_Count > 0);
+ return m_pArray[0];
+ }
+ const T& front() const
+ {
+ VMA_HEAVY_ASSERT(m_Count > 0);
+ return m_pArray[0];
+ }
+ T& back()
+ {
+ VMA_HEAVY_ASSERT(m_Count > 0);
+ return m_pArray[m_Count - 1];
+ }
+ const T& back() const
+ {
+ VMA_HEAVY_ASSERT(m_Count > 0);
+ return m_pArray[m_Count - 1];
+ }
+
+ void reserve(size_t newCapacity, bool freeMemory = false)
+ {
+ newCapacity = VMA_MAX(newCapacity, m_Count);
+
+ if((newCapacity < m_Capacity) && !freeMemory)
+ {
+ newCapacity = m_Capacity;
+ }
+
+ if(newCapacity != m_Capacity)
+ {
+ T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
+ if(m_Count != 0)
+ {
+ memcpy(newArray, m_pArray, m_Count * sizeof(T));
+ }
+ VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+ m_Capacity = newCapacity;
+ m_pArray = newArray;
+ }
+ }
+
+ void resize(size_t newCount, bool freeMemory = false)
+ {
+ size_t newCapacity = m_Capacity;
+ if(newCount > m_Capacity)
+ {
+ newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
+ }
+ else if(freeMemory)
+ {
+ newCapacity = newCount;
+ }
+
+ if(newCapacity != m_Capacity)
+ {
+ T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
+ const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
+ if(elementsToCopy != 0)
+ {
+ memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
+ }
+ VmaFree(m_Allocator.m_pCallbacks, m_pArray);
+ m_Capacity = newCapacity;
+ m_pArray = newArray;
+ }
+
+ m_Count = newCount;
+ }
+
+ void clear(bool freeMemory = false)
+ {
+ resize(0, freeMemory);
+ }
+
+ void insert(size_t index, const T& src)
+ {
+ VMA_HEAVY_ASSERT(index <= m_Count);
+ const size_t oldCount = size();
+ resize(oldCount + 1);
+ if(index < oldCount)
+ {
+ memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
+ }
+ m_pArray[index] = src;
+ }
+
+ void remove(size_t index)
+ {
+ VMA_HEAVY_ASSERT(index < m_Count);
+ const size_t oldCount = size();
+ if(index < oldCount - 1)
+ {
+ memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
+ }
+ resize(oldCount - 1);
+ }
+
+ void push_back(const T& src)
+ {
+ const size_t newIndex = size();
+ resize(newIndex + 1);
+ m_pArray[newIndex] = src;
+ }
+
+ void pop_back()
+ {
+ VMA_HEAVY_ASSERT(m_Count > 0);
+ resize(size() - 1);
+ }
+
+ void push_front(const T& src)
+ {
+ insert(0, src);
+ }
+
+ void pop_front()
+ {
+ VMA_HEAVY_ASSERT(m_Count > 0);
+ remove(0);
+ }
+
+ typedef T* iterator;
+
+ iterator begin() { return m_pArray; }
+ iterator end() { return m_pArray + m_Count; }
private:
- AllocatorT m_Allocator;
- T* m_pArray;
- size_t m_Count;
- size_t m_Capacity;
+ AllocatorT m_Allocator;
+ T* m_pArray;
+ size_t m_Count;
+ size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
- vec.insert(index, item);
+ vec.insert(index, item);
}
template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
- vec.remove(index);
+ vec.remove(index);
}
#endif // #if VMA_USE_STL_VECTOR
@@ -4702,31 +4582,31 @@ static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
- const size_t indexToInsert = VmaBinaryFindFirstNotLess(
- vector.data(),
- vector.data() + vector.size(),
- value,
- CmpLess()) - vector.data();
- VmaVectorInsert(vector, indexToInsert, value);
- return indexToInsert;
+ const size_t indexToInsert = VmaBinaryFindFirstNotLess(
+ vector.data(),
+ vector.data() + vector.size(),
+ value,
+ CmpLess()) - vector.data();
+ VmaVectorInsert(vector, indexToInsert, value);
+ return indexToInsert;
}
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
- CmpLess comparator;
- typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
- vector.begin(),
- vector.end(),
- value,
- comparator);
- if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
- {
- size_t indexToRemove = it - vector.begin();
- VmaVectorRemove(vector, indexToRemove);
- return true;
- }
- return false;
+ CmpLess comparator;
+ typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
+ vector.begin(),
+ vector.end(),
+ value,
+ comparator);
+ if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
+ {
+ size_t indexToRemove = it - vector.begin();
+ VmaVectorRemove(vector, indexToRemove);
+ return true;
+ }
+ return false;
}
////////////////////////////////////////////////////////////////////////////////
@@ -4740,120 +4620,120 @@ allocator can create multiple blocks.
template<typename T>
class VmaPoolAllocator
{
- VMA_CLASS_NO_COPY(VmaPoolAllocator)
+ VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
- VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
- ~VmaPoolAllocator();
- template<typename... Types> T* Alloc(Types... args);
- void Free(T* ptr);
+ VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
+ ~VmaPoolAllocator();
+ T* Alloc();
+ void Free(T* ptr);
private:
- union Item
- {
- uint32_t NextFreeIndex;
- alignas(T) char Value[sizeof(T)];
- };
-
- struct ItemBlock
- {
- Item* pItems;
- uint32_t Capacity;
- uint32_t FirstFreeIndex;
- };
-
- const VkAllocationCallbacks* m_pAllocationCallbacks;
- const uint32_t m_FirstBlockCapacity;
- VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
-
- ItemBlock& CreateNewBlock();
+ union Item
+ {
+ uint32_t NextFreeIndex;
+ alignas(T) char Value[sizeof(T)];
+ };
+
+ struct ItemBlock
+ {
+ Item* pItems;
+ uint32_t Capacity;
+ uint32_t FirstFreeIndex;
+ };
+
+ const VkAllocationCallbacks* m_pAllocationCallbacks;
+ const uint32_t m_FirstBlockCapacity;
+ VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
+
+ ItemBlock& CreateNewBlock();
};
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
- m_pAllocationCallbacks(pAllocationCallbacks),
- m_FirstBlockCapacity(firstBlockCapacity),
- m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
+ m_pAllocationCallbacks(pAllocationCallbacks),
+ m_FirstBlockCapacity(firstBlockCapacity),
+ m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
- VMA_ASSERT(m_FirstBlockCapacity > 1);
+ VMA_ASSERT(m_FirstBlockCapacity > 1);
}
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
- for(size_t i = m_ItemBlocks.size(); i--; )
- vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
- m_ItemBlocks.clear();
+ for(size_t i = m_ItemBlocks.size(); i--; )
+ vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
+ m_ItemBlocks.clear();
}
template<typename T>
-template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
-{
- for(size_t i = m_ItemBlocks.size(); i--; )
- {
- ItemBlock& block = m_ItemBlocks[i];
- // This block has some free items: Use first one.
- if(block.FirstFreeIndex != UINT32_MAX)
- {
- Item* const pItem = &block.pItems[block.FirstFreeIndex];
- block.FirstFreeIndex = pItem->NextFreeIndex;
- T* result = (T*)&pItem->Value;
- new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
- return result;
- }
- }
-
- // No block has free item: Create new one and use it.
- ItemBlock& newBlock = CreateNewBlock();
- Item* const pItem = &newBlock.pItems[0];
- newBlock.FirstFreeIndex = pItem->NextFreeIndex;
- T* result = (T*)&pItem->Value;
- new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
- return result;
+T* VmaPoolAllocator<T>::Alloc()
+{
+ for(size_t i = m_ItemBlocks.size(); i--; )
+ {
+ ItemBlock& block = m_ItemBlocks[i];
+ // This block has free items: use the first one.
+ if(block.FirstFreeIndex != UINT32_MAX)
+ {
+ Item* const pItem = &block.pItems[block.FirstFreeIndex];
+ block.FirstFreeIndex = pItem->NextFreeIndex;
+ T* result = (T*)&pItem->Value;
+ new(result)T(); // Explicit constructor call.
+ return result;
+ }
+ }
+
+ // No block has a free item: create a new one and use it.
+ ItemBlock& newBlock = CreateNewBlock();
+ Item* const pItem = &newBlock.pItems[0];
+ newBlock.FirstFreeIndex = pItem->NextFreeIndex;
+ T* result = (T*)&pItem->Value;
+ new(result)T(); // Explicit constructor call.
+ return result;
}
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
- // Search all memory blocks to find ptr.
- for(size_t i = m_ItemBlocks.size(); i--; )
- {
- ItemBlock& block = m_ItemBlocks[i];
-
- // Casting to union.
- Item* pItemPtr;
- memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
-
- // Check if pItemPtr is in address range of this block.
- if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
- {
- ptr->~T(); // Explicit destructor call.
- const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
- pItemPtr->NextFreeIndex = block.FirstFreeIndex;
- block.FirstFreeIndex = index;
- return;
- }
- }
- VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
+ // Search all memory blocks to find ptr.
+ for(size_t i = m_ItemBlocks.size(); i--; )
+ {
+ ItemBlock& block = m_ItemBlocks[i];
+
+ // Reinterpret ptr as a pointer into the block's Item union, via memcpy.
+ Item* pItemPtr;
+ memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));
+
+ // Check if pItemPtr is in address range of this block.
+ if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
+ {
+ ptr->~T(); // Explicit destructor call.
+ const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
+ pItemPtr->NextFreeIndex = block.FirstFreeIndex;
+ block.FirstFreeIndex = index;
+ return;
+ }
+ }
+ VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
- const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
- m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
+ const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
+ m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
- const ItemBlock newBlock = {
- vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
- newBlockCapacity,
- 0 };
+ const ItemBlock newBlock = {
+ vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
+ newBlockCapacity,
+ 0 };
- m_ItemBlocks.push_back(newBlock);
+ m_ItemBlocks.push_back(newBlock);
- // Setup singly-linked list of all free items in this block.
- for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
- newBlock.pItems[i].NextFreeIndex = i + 1;
- newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
- return m_ItemBlocks.back();
+ // Set up the singly-linked list of all free items in this block.
+ for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
+ newBlock.pItems[i].NextFreeIndex = i + 1;
+ newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
+ return m_ItemBlocks.back();
}
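A usage sketch of the pool (values hypothetical). Blocks grow by 3/2 like the vector, and Free pushes the slot onto the block's intrusive free list, so the very next Alloc reuses it:

static void PoolExample(const VkAllocationCallbacks* pCbs)
{
    VmaPoolAllocator<uint64_t> pool(pCbs, 32); // first block holds 32 items
    uint64_t* const a = pool.Alloc();          // takes slot 0 of a fresh block
    uint64_t* const b = pool.Alloc();          // takes slot 1
    pool.Free(a);                              // slot 0 becomes head of the free list
    uint64_t* const c = pool.Alloc();
    VMA_ASSERT(c == a);                        // slot 0 is recycled
    pool.Free(b);
    pool.Free(c);
}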
////////////////////////////////////////////////////////////////////////////////
@@ -4868,462 +4748,462 @@ typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
template<typename T>
struct VmaListItem
{
- VmaListItem* pPrev;
- VmaListItem* pNext;
- T Value;
+ VmaListItem* pPrev;
+ VmaListItem* pNext;
+ T Value;
};
// Doubly linked list.
template<typename T>
class VmaRawList
{
- VMA_CLASS_NO_COPY(VmaRawList)
+ VMA_CLASS_NO_COPY(VmaRawList)
public:
- typedef VmaListItem<T> ItemType;
+ typedef VmaListItem<T> ItemType;
- VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
- ~VmaRawList();
- void Clear();
+ VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
+ ~VmaRawList();
+ void Clear();
- size_t GetCount() const { return m_Count; }
- bool IsEmpty() const { return m_Count == 0; }
+ size_t GetCount() const { return m_Count; }
+ bool IsEmpty() const { return m_Count == 0; }
- ItemType* Front() { return m_pFront; }
- const ItemType* Front() const { return m_pFront; }
- ItemType* Back() { return m_pBack; }
- const ItemType* Back() const { return m_pBack; }
+ ItemType* Front() { return m_pFront; }
+ const ItemType* Front() const { return m_pFront; }
+ ItemType* Back() { return m_pBack; }
+ const ItemType* Back() const { return m_pBack; }
- ItemType* PushBack();
- ItemType* PushFront();
- ItemType* PushBack(const T& value);
- ItemType* PushFront(const T& value);
- void PopBack();
- void PopFront();
-
- // Item can be null - it means PushBack.
- ItemType* InsertBefore(ItemType* pItem);
- // Item can be null - it means PushFront.
- ItemType* InsertAfter(ItemType* pItem);
+ ItemType* PushBack();
+ ItemType* PushFront();
+ ItemType* PushBack(const T& value);
+ ItemType* PushFront(const T& value);
+ void PopBack();
+ void PopFront();
+
+ // Item can be null - it means PushBack.
+ ItemType* InsertBefore(ItemType* pItem);
+ // Item can be null - it means PushFront.
+ ItemType* InsertAfter(ItemType* pItem);
- ItemType* InsertBefore(ItemType* pItem, const T& value);
- ItemType* InsertAfter(ItemType* pItem, const T& value);
+ ItemType* InsertBefore(ItemType* pItem, const T& value);
+ ItemType* InsertAfter(ItemType* pItem, const T& value);
- void Remove(ItemType* pItem);
+ void Remove(ItemType* pItem);
private:
- const VkAllocationCallbacks* const m_pAllocationCallbacks;
- VmaPoolAllocator<ItemType> m_ItemAllocator;
- ItemType* m_pFront;
- ItemType* m_pBack;
- size_t m_Count;
+ const VkAllocationCallbacks* const m_pAllocationCallbacks;
+ VmaPoolAllocator<ItemType> m_ItemAllocator;
+ ItemType* m_pFront;
+ ItemType* m_pBack;
+ size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
- m_pAllocationCallbacks(pAllocationCallbacks),
- m_ItemAllocator(pAllocationCallbacks, 128),
- m_pFront(VMA_NULL),
- m_pBack(VMA_NULL),
- m_Count(0)
+ m_pAllocationCallbacks(pAllocationCallbacks),
+ m_ItemAllocator(pAllocationCallbacks, 128),
+ m_pFront(VMA_NULL),
+ m_pBack(VMA_NULL),
+ m_Count(0)
{
}
template<typename T>
VmaRawList<T>::~VmaRawList()
{
- // Intentionally not calling Clear, because that would be unnecessary
- // computations to return all items to m_ItemAllocator as free.
+ // Intentionally not calling Clear, because that would waste computation
+ // returning every item to m_ItemAllocator as free.
}
template<typename T>
void VmaRawList<T>::Clear()
{
- if(IsEmpty() == false)
- {
- ItemType* pItem = m_pBack;
- while(pItem != VMA_NULL)
- {
- ItemType* const pPrevItem = pItem->pPrev;
- m_ItemAllocator.Free(pItem);
- pItem = pPrevItem;
- }
- m_pFront = VMA_NULL;
- m_pBack = VMA_NULL;
- m_Count = 0;
- }
+ if(IsEmpty() == false)
+ {
+ ItemType* pItem = m_pBack;
+ while(pItem != VMA_NULL)
+ {
+ ItemType* const pPrevItem = pItem->pPrev;
+ m_ItemAllocator.Free(pItem);
+ pItem = pPrevItem;
+ }
+ m_pFront = VMA_NULL;
+ m_pBack = VMA_NULL;
+ m_Count = 0;
+ }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
- ItemType* const pNewItem = m_ItemAllocator.Alloc();
- pNewItem->pNext = VMA_NULL;
- if(IsEmpty())
- {
- pNewItem->pPrev = VMA_NULL;
- m_pFront = pNewItem;
- m_pBack = pNewItem;
- m_Count = 1;
- }
- else
- {
- pNewItem->pPrev = m_pBack;
- m_pBack->pNext = pNewItem;
- m_pBack = pNewItem;
- ++m_Count;
- }
- return pNewItem;
+ ItemType* const pNewItem = m_ItemAllocator.Alloc();
+ pNewItem->pNext = VMA_NULL;
+ if(IsEmpty())
+ {
+ pNewItem->pPrev = VMA_NULL;
+ m_pFront = pNewItem;
+ m_pBack = pNewItem;
+ m_Count = 1;
+ }
+ else
+ {
+ pNewItem->pPrev = m_pBack;
+ m_pBack->pNext = pNewItem;
+ m_pBack = pNewItem;
+ ++m_Count;
+ }
+ return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
- ItemType* const pNewItem = m_ItemAllocator.Alloc();
- pNewItem->pPrev = VMA_NULL;
- if(IsEmpty())
- {
- pNewItem->pNext = VMA_NULL;
- m_pFront = pNewItem;
- m_pBack = pNewItem;
- m_Count = 1;
- }
- else
- {
- pNewItem->pNext = m_pFront;
- m_pFront->pPrev = pNewItem;
- m_pFront = pNewItem;
- ++m_Count;
- }
- return pNewItem;
+ ItemType* const pNewItem = m_ItemAllocator.Alloc();
+ pNewItem->pPrev = VMA_NULL;
+ if(IsEmpty())
+ {
+ pNewItem->pNext = VMA_NULL;
+ m_pFront = pNewItem;
+ m_pBack = pNewItem;
+ m_Count = 1;
+ }
+ else
+ {
+ pNewItem->pNext = m_pFront;
+ m_pFront->pPrev = pNewItem;
+ m_pFront = pNewItem;
+ ++m_Count;
+ }
+ return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
- ItemType* const pNewItem = PushBack();
- pNewItem->Value = value;
- return pNewItem;
+ ItemType* const pNewItem = PushBack();
+ pNewItem->Value = value;
+ return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
- ItemType* const pNewItem = PushFront();
- pNewItem->Value = value;
- return pNewItem;
+ ItemType* const pNewItem = PushFront();
+ pNewItem->Value = value;
+ return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
- VMA_HEAVY_ASSERT(m_Count > 0);
- ItemType* const pBackItem = m_pBack;
- ItemType* const pPrevItem = pBackItem->pPrev;
- if(pPrevItem != VMA_NULL)
- {
- pPrevItem->pNext = VMA_NULL;
- }
- m_pBack = pPrevItem;
- m_ItemAllocator.Free(pBackItem);
- --m_Count;
+ VMA_HEAVY_ASSERT(m_Count > 0);
+ ItemType* const pBackItem = m_pBack;
+ ItemType* const pPrevItem = pBackItem->pPrev;
+ if(pPrevItem != VMA_NULL)
+ {
+ pPrevItem->pNext = VMA_NULL;
+ }
+ m_pBack = pPrevItem;
+ m_ItemAllocator.Free(pBackItem);
+ --m_Count;
}
template<typename T>
void VmaRawList<T>::PopFront()
{
- VMA_HEAVY_ASSERT(m_Count > 0);
- ItemType* const pFrontItem = m_pFront;
- ItemType* const pNextItem = pFrontItem->pNext;
- if(pNextItem != VMA_NULL)
- {
- pNextItem->pPrev = VMA_NULL;
- }
- m_pFront = pNextItem;
- m_ItemAllocator.Free(pFrontItem);
- --m_Count;
+ VMA_HEAVY_ASSERT(m_Count > 0);
+ ItemType* const pFrontItem = m_pFront;
+ ItemType* const pNextItem = pFrontItem->pNext;
+ if(pNextItem != VMA_NULL)
+ {
+ pNextItem->pPrev = VMA_NULL;
+ }
+ m_pFront = pNextItem;
+ m_ItemAllocator.Free(pFrontItem);
+ --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
- VMA_HEAVY_ASSERT(pItem != VMA_NULL);
- VMA_HEAVY_ASSERT(m_Count > 0);
-
- if(pItem->pPrev != VMA_NULL)
- {
- pItem->pPrev->pNext = pItem->pNext;
- }
- else
- {
- VMA_HEAVY_ASSERT(m_pFront == pItem);
- m_pFront = pItem->pNext;
- }
-
- if(pItem->pNext != VMA_NULL)
- {
- pItem->pNext->pPrev = pItem->pPrev;
- }
- else
- {
- VMA_HEAVY_ASSERT(m_pBack == pItem);
- m_pBack = pItem->pPrev;
- }
-
- m_ItemAllocator.Free(pItem);
- --m_Count;
+ VMA_HEAVY_ASSERT(pItem != VMA_NULL);
+ VMA_HEAVY_ASSERT(m_Count > 0);
+
+ if(pItem->pPrev != VMA_NULL)
+ {
+ pItem->pPrev->pNext = pItem->pNext;
+ }
+ else
+ {
+ VMA_HEAVY_ASSERT(m_pFront == pItem);
+ m_pFront = pItem->pNext;
+ }
+
+ if(pItem->pNext != VMA_NULL)
+ {
+ pItem->pNext->pPrev = pItem->pPrev;
+ }
+ else
+ {
+ VMA_HEAVY_ASSERT(m_pBack == pItem);
+ m_pBack = pItem->pPrev;
+ }
+
+ m_ItemAllocator.Free(pItem);
+ --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
- if(pItem != VMA_NULL)
- {
- ItemType* const prevItem = pItem->pPrev;
- ItemType* const newItem = m_ItemAllocator.Alloc();
- newItem->pPrev = prevItem;
- newItem->pNext = pItem;
- pItem->pPrev = newItem;
- if(prevItem != VMA_NULL)
- {
- prevItem->pNext = newItem;
- }
- else
- {
- VMA_HEAVY_ASSERT(m_pFront == pItem);
- m_pFront = newItem;
- }
- ++m_Count;
- return newItem;
- }
- else
- return PushBack();
+ if(pItem != VMA_NULL)
+ {
+ ItemType* const prevItem = pItem->pPrev;
+ ItemType* const newItem = m_ItemAllocator.Alloc();
+ newItem->pPrev = prevItem;
+ newItem->pNext = pItem;
+ pItem->pPrev = newItem;
+ if(prevItem != VMA_NULL)
+ {
+ prevItem->pNext = newItem;
+ }
+ else
+ {
+ VMA_HEAVY_ASSERT(m_pFront == pItem);
+ m_pFront = newItem;
+ }
+ ++m_Count;
+ return newItem;
+ }
+ else
+ return PushBack();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
- if(pItem != VMA_NULL)
- {
- ItemType* const nextItem = pItem->pNext;
- ItemType* const newItem = m_ItemAllocator.Alloc();
- newItem->pNext = nextItem;
- newItem->pPrev = pItem;
- pItem->pNext = newItem;
- if(nextItem != VMA_NULL)
- {
- nextItem->pPrev = newItem;
- }
- else
- {
- VMA_HEAVY_ASSERT(m_pBack == pItem);
- m_pBack = newItem;
- }
- ++m_Count;
- return newItem;
- }
- else
- return PushFront();
+ if(pItem != VMA_NULL)
+ {
+ ItemType* const nextItem = pItem->pNext;
+ ItemType* const newItem = m_ItemAllocator.Alloc();
+ newItem->pNext = nextItem;
+ newItem->pPrev = pItem;
+ pItem->pNext = newItem;
+ if(nextItem != VMA_NULL)
+ {
+ nextItem->pPrev = newItem;
+ }
+ else
+ {
+ VMA_HEAVY_ASSERT(m_pBack == pItem);
+ m_pBack = newItem;
+ }
+ ++m_Count;
+ return newItem;
+ }
+ else
+ return PushFront();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
- ItemType* const newItem = InsertBefore(pItem);
- newItem->Value = value;
- return newItem;
+ ItemType* const newItem = InsertBefore(pItem);
+ newItem->Value = value;
+ return newItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
- ItemType* const newItem = InsertAfter(pItem);
- newItem->Value = value;
- return newItem;
+ ItemType* const newItem = InsertAfter(pItem);
+ newItem->Value = value;
+ return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
- VMA_CLASS_NO_COPY(VmaList)
+ VMA_CLASS_NO_COPY(VmaList)
public:
- class iterator
- {
- public:
- iterator() :
- m_pList(VMA_NULL),
- m_pItem(VMA_NULL)
- {
- }
-
- T& operator*() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return m_pItem->Value;
- }
- T* operator->() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return &m_pItem->Value;
- }
-
- iterator& operator++()
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- m_pItem = m_pItem->pNext;
- return *this;
- }
- iterator& operator--()
- {
- if(m_pItem != VMA_NULL)
- {
- m_pItem = m_pItem->pPrev;
- }
- else
- {
- VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
- m_pItem = m_pList->Back();
- }
- return *this;
- }
-
- iterator operator++(int)
- {
- iterator result = *this;
- ++*this;
- return result;
- }
- iterator operator--(int)
- {
- iterator result = *this;
- --*this;
- return result;
- }
-
- bool operator==(const iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem == rhs.m_pItem;
- }
- bool operator!=(const iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem != rhs.m_pItem;
- }
-
- private:
- VmaRawList<T>* m_pList;
- VmaListItem<T>* m_pItem;
-
- iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
- m_pList(pList),
- m_pItem(pItem)
- {
- }
-
- friend class VmaList<T, AllocatorT>;
- };
-
- class const_iterator
- {
- public:
- const_iterator() :
- m_pList(VMA_NULL),
- m_pItem(VMA_NULL)
- {
- }
-
- const_iterator(const iterator& src) :
- m_pList(src.m_pList),
- m_pItem(src.m_pItem)
- {
- }
-
- const T& operator*() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return m_pItem->Value;
- }
- const T* operator->() const
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- return &m_pItem->Value;
- }
-
- const_iterator& operator++()
- {
- VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
- m_pItem = m_pItem->pNext;
- return *this;
- }
- const_iterator& operator--()
- {
- if(m_pItem != VMA_NULL)
- {
- m_pItem = m_pItem->pPrev;
- }
- else
- {
- VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
- m_pItem = m_pList->Back();
- }
- return *this;
- }
-
- const_iterator operator++(int)
- {
- const_iterator result = *this;
- ++*this;
- return result;
- }
- const_iterator operator--(int)
- {
- const_iterator result = *this;
- --*this;
- return result;
- }
-
- bool operator==(const const_iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem == rhs.m_pItem;
- }
- bool operator!=(const const_iterator& rhs) const
- {
- VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
- return m_pItem != rhs.m_pItem;
- }
-
- private:
- const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
- m_pList(pList),
- m_pItem(pItem)
- {
- }
-
- const VmaRawList<T>* m_pList;
- const VmaListItem<T>* m_pItem;
-
- friend class VmaList<T, AllocatorT>;
- };
-
- VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
-
- bool empty() const { return m_RawList.IsEmpty(); }
- size_t size() const { return m_RawList.GetCount(); }
-
- iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
- iterator end() { return iterator(&m_RawList, VMA_NULL); }
-
- const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
- const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
-
- void clear() { m_RawList.Clear(); }
- void push_back(const T& value) { m_RawList.PushBack(value); }
- void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
- iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
+ class iterator
+ {
+ public:
+ iterator() :
+ m_pList(VMA_NULL),
+ m_pItem(VMA_NULL)
+ {
+ }
+
+ T& operator*() const
+ {
+ VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ return m_pItem->Value;
+ }
+ T* operator->() const
+ {
+ VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ return &m_pItem->Value;
+ }
+
+ iterator& operator++()
+ {
+ VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ m_pItem = m_pItem->pNext;
+ return *this;
+ }
+ iterator& operator--()
+ {
+ if(m_pItem != VMA_NULL)
+ {
+ m_pItem = m_pItem->pPrev;
+ }
+ else
+ {
+ VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+ m_pItem = m_pList->Back();
+ }
+ return *this;
+ }
+
+ iterator operator++(int)
+ {
+ iterator result = *this;
+ ++*this;
+ return result;
+ }
+ iterator operator--(int)
+ {
+ iterator result = *this;
+ --*this;
+ return result;
+ }
+
+ bool operator==(const iterator& rhs) const
+ {
+ VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+ return m_pItem == rhs.m_pItem;
+ }
+ bool operator!=(const iterator& rhs) const
+ {
+ VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+ return m_pItem != rhs.m_pItem;
+ }
+
+ private:
+ VmaRawList<T>* m_pList;
+ VmaListItem<T>* m_pItem;
+
+ iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
+ m_pList(pList),
+ m_pItem(pItem)
+ {
+ }
+
+ friend class VmaList<T, AllocatorT>;
+ };
+
+ class const_iterator
+ {
+ public:
+ const_iterator() :
+ m_pList(VMA_NULL),
+ m_pItem(VMA_NULL)
+ {
+ }
+
+ const_iterator(const iterator& src) :
+ m_pList(src.m_pList),
+ m_pItem(src.m_pItem)
+ {
+ }
+
+ const T& operator*() const
+ {
+ VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ return m_pItem->Value;
+ }
+ const T* operator->() const
+ {
+ VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ return &m_pItem->Value;
+ }
+
+ const_iterator& operator++()
+ {
+ VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
+ m_pItem = m_pItem->pNext;
+ return *this;
+ }
+ const_iterator& operator--()
+ {
+ if(m_pItem != VMA_NULL)
+ {
+ m_pItem = m_pItem->pPrev;
+ }
+ else
+ {
+ VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
+ m_pItem = m_pList->Back();
+ }
+ return *this;
+ }
+
+ const_iterator operator++(int)
+ {
+ const_iterator result = *this;
+ ++*this;
+ return result;
+ }
+ const_iterator operator--(int)
+ {
+ const_iterator result = *this;
+ --*this;
+ return result;
+ }
+
+ bool operator==(const const_iterator& rhs) const
+ {
+ VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+ return m_pItem == rhs.m_pItem;
+ }
+ bool operator!=(const const_iterator& rhs) const
+ {
+ VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
+ return m_pItem != rhs.m_pItem;
+ }
+
+ private:
+ const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
+ m_pList(pList),
+ m_pItem(pItem)
+ {
+ }
+
+ const VmaRawList<T>* m_pList;
+ const VmaListItem<T>* m_pItem;
+
+ friend class VmaList<T, AllocatorT>;
+ };
+
+ VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
+
+ bool empty() const { return m_RawList.IsEmpty(); }
+ size_t size() const { return m_RawList.GetCount(); }
+
+ iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
+ iterator end() { return iterator(&m_RawList, VMA_NULL); }
+
+ const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
+ const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }
+
+ void clear() { m_RawList.Clear(); }
+ void push_back(const T& value) { m_RawList.PushBack(value); }
+ void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
+ iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
private:
- VmaRawList<T> m_RawList;
+ VmaRawList<T> m_RawList;
};
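A short sketch of the list wrapper (pCbs hypothetical). Nodes come from the embedded VmaPoolAllocator (128 items in the first block), so push_back and erase avoid per-node heap traffic:

typedef VmaList< uint32_t, VmaStlAllocator<uint32_t> > U32List;
static void ListExample(const VkAllocationCallbacks* pCbs)
{
    const VmaStlAllocator<uint32_t> alloc(pCbs);
    U32List lst(alloc);
    lst.push_back(1u);
    U32List::iterator it = lst.insert(lst.end(), 2u); // insert before end == push_back
    VMA_ASSERT(lst.size() == 2);
    lst.erase(it);
    lst.clear();
}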
#endif // #if VMA_USE_STL_LIST
@@ -5339,18 +5219,18 @@ private:
#define VmaPair std::pair
#define VMA_MAP_TYPE(KeyT, ValueT) \
- std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
+ std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
#else // #if VMA_USE_STL_UNORDERED_MAP
template<typename T1, typename T2>
struct VmaPair
{
- T1 first;
- T2 second;
+ T1 first;
+ T2 second;
- VmaPair() : first(), second() { }
- VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
+ VmaPair() : first(), second() { }
+ VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with subset of interface of std::unordered_map.
@@ -5360,20 +5240,20 @@ template<typename KeyT, typename ValueT>
class VmaMap
{
public:
- typedef VmaPair<KeyT, ValueT> PairType;
- typedef PairType* iterator;
+ typedef VmaPair<KeyT, ValueT> PairType;
+ typedef PairType* iterator;
- VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
+ VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
- iterator begin() { return m_Vector.begin(); }
- iterator end() { return m_Vector.end(); }
+ iterator begin() { return m_Vector.begin(); }
+ iterator end() { return m_Vector.end(); }
- void insert(const PairType& pair);
- iterator find(const KeyT& key);
- void erase(iterator it);
-
+ void insert(const PairType& pair);
+ iterator find(const KeyT& key);
+ void erase(iterator it);
+
private:
- VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
+ VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};
#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
@@ -5381,49 +5261,49 @@ private:
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
- bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
- {
- return lhs.first < rhs.first;
- }
- bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
- {
- return lhs.first < rhsFirst;
- }
+ bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
+ {
+ return lhs.first < rhs.first;
+ }
+ bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
+ {
+ return lhs.first < rhsFirst;
+ }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
- const size_t indexToInsert = VmaBinaryFindFirstNotLess(
- m_Vector.data(),
- m_Vector.data() + m_Vector.size(),
- pair,
- VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
- VmaVectorInsert(m_Vector, indexToInsert, pair);
+ const size_t indexToInsert = VmaBinaryFindFirstNotLess(
+ m_Vector.data(),
+ m_Vector.data() + m_Vector.size(),
+ pair,
+ VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
+ VmaVectorInsert(m_Vector, indexToInsert, pair);
}
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
- PairType* it = VmaBinaryFindFirstNotLess(
- m_Vector.data(),
- m_Vector.data() + m_Vector.size(),
- key,
- VmaPairFirstLess<KeyT, ValueT>());
- if((it != m_Vector.end()) && (it->first == key))
- {
- return it;
- }
- else
- {
- return m_Vector.end();
- }
+ PairType* it = VmaBinaryFindFirstNotLess(
+ m_Vector.data(),
+ m_Vector.data() + m_Vector.size(),
+ key,
+ VmaPairFirstLess<KeyT, ValueT>());
+ if((it != m_Vector.end()) && (it->first == key))
+ {
+ return it;
+ }
+ else
+ {
+ return m_Vector.end();
+ }
}
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
- VmaVectorRemove(m_Vector, it - m_Vector.begin());
+ VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
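A sketch of the map in its non-STL form (pCbs hypothetical). It is a VmaVector of pairs kept sorted by key, so insert and find cost O(log n) comparisons, plus a memmove on insert:

static void MapExample(const VkAllocationCallbacks* pCbs)
{
    typedef VmaPair<uint32_t, float> Pair;
    const VmaStlAllocator<Pair> alloc(pCbs);
    VmaMap<uint32_t, float> m(alloc);
    m.insert(Pair(7u, 1.5f));
    VmaMap<uint32_t, float>::iterator it = m.find(7u);
    VMA_ASSERT(it != m.end() && it->second == 1.5f);
    m.erase(it);
}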
#endif // #if VMA_USE_STL_UNORDERED_MAP
@@ -5439,222 +5319,223 @@ enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
- static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
+ static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
- enum FLAGS
- {
- FLAG_USER_DATA_STRING = 0x01,
- };
+ enum FLAGS
+ {
+ FLAG_USER_DATA_STRING = 0x01,
+ };
public:
- enum ALLOCATION_TYPE
- {
- ALLOCATION_TYPE_NONE,
- ALLOCATION_TYPE_BLOCK,
- ALLOCATION_TYPE_DEDICATED,
- };
-
- /*
- This struct is allocated using VmaPoolAllocator.
- */
-
- VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
- m_Alignment{1},
- m_Size{0},
- m_pUserData{VMA_NULL},
- m_LastUseFrameIndex{currentFrameIndex},
- m_MemoryTypeIndex{0},
- m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
- m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
- m_MapCount{0},
- m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
- {
+ enum ALLOCATION_TYPE
+ {
+ ALLOCATION_TYPE_NONE,
+ ALLOCATION_TYPE_BLOCK,
+ ALLOCATION_TYPE_DEDICATED,
+ };
+
+ /*
+ This struct is allocated using VmaPoolAllocator.
+ */
+
+ void Ctor(uint32_t currentFrameIndex, bool userDataString)
+ {
+ m_Alignment = 1;
+ m_Size = 0;
+ m_MemoryTypeIndex = 0;
+ m_pUserData = VMA_NULL;
+ m_LastUseFrameIndex = currentFrameIndex;
+ m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
+ m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
+ m_MapCount = 0;
+ m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
+
#if VMA_STATS_STRING_ENABLED
- m_CreationFrameIndex = currentFrameIndex;
- m_BufferImageUsage = 0;
+ m_CreationFrameIndex = currentFrameIndex;
+ m_BufferImageUsage = 0;
#endif
- }
-
- ~VmaAllocation_T()
- {
- VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
-
- // Check if owned string was freed.
- VMA_ASSERT(m_pUserData == VMA_NULL);
- }
-
- void InitBlockAllocation(
- VmaDeviceMemoryBlock* block,
- VkDeviceSize offset,
- VkDeviceSize alignment,
- VkDeviceSize size,
- uint32_t memoryTypeIndex,
- VmaSuballocationType suballocationType,
- bool mapped,
- bool canBecomeLost)
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- VMA_ASSERT(block != VMA_NULL);
- m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
- m_Alignment = alignment;
- m_Size = size;
- m_MemoryTypeIndex = memoryTypeIndex;
- m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
- m_SuballocationType = (uint8_t)suballocationType;
- m_BlockAllocation.m_Block = block;
- m_BlockAllocation.m_Offset = offset;
- m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
- }
-
- void InitLost()
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
- m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
- m_MemoryTypeIndex = 0;
- m_BlockAllocation.m_Block = VMA_NULL;
- m_BlockAllocation.m_Offset = 0;
- m_BlockAllocation.m_CanBecomeLost = true;
- }
-
- void ChangeBlockAllocation(
- VmaAllocator hAllocator,
- VmaDeviceMemoryBlock* block,
- VkDeviceSize offset);
-
- void ChangeOffset(VkDeviceSize newOffset);
-
- // pMappedData not null means allocation is created with MAPPED flag.
- void InitDedicatedAllocation(
- uint32_t memoryTypeIndex,
- VkDeviceMemory hMemory,
- VmaSuballocationType suballocationType,
- void* pMappedData,
- VkDeviceSize size)
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
- VMA_ASSERT(hMemory != VK_NULL_HANDLE);
- m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
- m_Alignment = 0;
- m_Size = size;
- m_MemoryTypeIndex = memoryTypeIndex;
- m_SuballocationType = (uint8_t)suballocationType;
- m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
- m_DedicatedAllocation.m_hMemory = hMemory;
- m_DedicatedAllocation.m_pMappedData = pMappedData;
- }
-
- ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
- VkDeviceSize GetAlignment() const { return m_Alignment; }
- VkDeviceSize GetSize() const { return m_Size; }
- bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
- void* GetUserData() const { return m_pUserData; }
- void SetUserData(VmaAllocator hAllocator, void* pUserData);
- VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
-
- VmaDeviceMemoryBlock* GetBlock() const
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
- return m_BlockAllocation.m_Block;
- }
- VkDeviceSize GetOffset() const;
- VkDeviceMemory GetMemory() const;
- uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
- bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
- void* GetMappedData() const;
- bool CanBecomeLost() const;
-
- uint32_t GetLastUseFrameIndex() const
- {
- return m_LastUseFrameIndex.load();
- }
- bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
- {
- return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
- }
- /*
- - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
- makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
- - Else, returns false.
-
- If hAllocation is already lost, assert - you should not call it then.
- If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
- */
- bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
- {
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
- outInfo.blockCount = 1;
- outInfo.allocationCount = 1;
- outInfo.unusedRangeCount = 0;
- outInfo.usedBytes = m_Size;
- outInfo.unusedBytes = 0;
- outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMax = 0;
- }
-
- void BlockAllocMap();
- void BlockAllocUnmap();
- VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
- void DedicatedAllocUnmap(VmaAllocator hAllocator);
+ }
+
+ void Dtor()
+ {
+ VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");
+
+ // Check if owned string was freed.
+ VMA_ASSERT(m_pUserData == VMA_NULL);
+ }
+
+ void InitBlockAllocation(
+ VmaDeviceMemoryBlock* block,
+ VkDeviceSize offset,
+ VkDeviceSize alignment,
+ VkDeviceSize size,
+ uint32_t memoryTypeIndex,
+ VmaSuballocationType suballocationType,
+ bool mapped,
+ bool canBecomeLost)
+ {
+ VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
+ VMA_ASSERT(block != VMA_NULL);
+ m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
+ m_Alignment = alignment;
+ m_Size = size;
+ m_MemoryTypeIndex = memoryTypeIndex;
+ m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
+ m_SuballocationType = (uint8_t)suballocationType;
+ m_BlockAllocation.m_Block = block;
+ m_BlockAllocation.m_Offset = offset;
+ m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
+ }
+
+ void InitLost()
+ {
+ VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
+ VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
+ m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
+ m_MemoryTypeIndex = 0;
+ m_BlockAllocation.m_Block = VMA_NULL;
+ m_BlockAllocation.m_Offset = 0;
+ m_BlockAllocation.m_CanBecomeLost = true;
+ }
+
+ void ChangeBlockAllocation(
+ VmaAllocator hAllocator,
+ VmaDeviceMemoryBlock* block,
+ VkDeviceSize offset);
+
+ void ChangeOffset(VkDeviceSize newOffset);
+
+	// A non-null pMappedData means the allocation was created with the MAPPED flag.
+ void InitDedicatedAllocation(
+ uint32_t memoryTypeIndex,
+ VkDeviceMemory hMemory,
+ VmaSuballocationType suballocationType,
+ void* pMappedData,
+ VkDeviceSize size)
+ {
+ VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
+ VMA_ASSERT(hMemory != VK_NULL_HANDLE);
+ m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
+ m_Alignment = 0;
+ m_Size = size;
+ m_MemoryTypeIndex = memoryTypeIndex;
+ m_SuballocationType = (uint8_t)suballocationType;
+ m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
+ m_DedicatedAllocation.m_hMemory = hMemory;
+ m_DedicatedAllocation.m_pMappedData = pMappedData;
+ }
+
+ ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
+ VkDeviceSize GetAlignment() const { return m_Alignment; }
+ VkDeviceSize GetSize() const { return m_Size; }
+ bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
+ void* GetUserData() const { return m_pUserData; }
+ void SetUserData(VmaAllocator hAllocator, void* pUserData);
+ VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
+
+ VmaDeviceMemoryBlock* GetBlock() const
+ {
+ VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+ return m_BlockAllocation.m_Block;
+ }
+ VkDeviceSize GetOffset() const;
+ VkDeviceMemory GetMemory() const;
+ uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
+ bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
+ void* GetMappedData() const;
+ bool CanBecomeLost() const;
+
+ uint32_t GetLastUseFrameIndex() const
+ {
+ return m_LastUseFrameIndex.load();
+ }
+ bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
+ {
+ return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
+ }
+ /*
+ - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
+ makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
+ - Else, returns false.
+
+	If hAllocation is already lost, this asserts - you should not call it in that case.
+	If hAllocation was not created with CAN_BECOME_LOST_BIT, this also asserts.
+ */
+ bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
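
// A minimal sketch of how MakeLost() can be built on
// CompareExchangeLastUseFrameIndex() above: retry a CAS until the allocation
// is either marked lost or observed to still be inside the frame-in-use
// window. This is an illustration with hypothetical names, not VMA's
// implementation; for brevity the already-lost case returns false instead of
// asserting.

#include <atomic>
#include <cstdint>

static const uint32_t kFrameIndexLostSketch = UINT32_MAX; // stand-in for VMA_FRAME_INDEX_LOST

static bool MakeLostSketch(std::atomic<uint32_t>& lastUseFrameIndex,
		uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
	uint32_t lastUse = lastUseFrameIndex.load();
	for (;;)
	{
		if (lastUse == kFrameIndexLostSketch)
			return false; // already lost
		if (lastUse + frameInUseCount >= currentFrameIndex)
			return false; // still potentially in use this frame window
		// Attempt the transition to "lost". On failure another thread updated
		// the index concurrently; lastUse now holds the fresh value, so retry.
		if (lastUseFrameIndex.compare_exchange_weak(lastUse, kFrameIndexLostSketch))
			return true;
	}
}
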
+
+ void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
+ {
+ VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
+ outInfo.blockCount = 1;
+ outInfo.allocationCount = 1;
+ outInfo.unusedRangeCount = 0;
+ outInfo.usedBytes = m_Size;
+ outInfo.unusedBytes = 0;
+ outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
+ outInfo.unusedRangeSizeMin = UINT64_MAX;
+ outInfo.unusedRangeSizeMax = 0;
+ }
+
+ void BlockAllocMap();
+ void BlockAllocUnmap();
+ VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
+ void DedicatedAllocUnmap(VmaAllocator hAllocator);
#if VMA_STATS_STRING_ENABLED
- uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
- uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }
+ uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
+ uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }
- void InitBufferImageUsage(uint32_t bufferImageUsage)
- {
- VMA_ASSERT(m_BufferImageUsage == 0);
- m_BufferImageUsage = bufferImageUsage;
- }
+ void InitBufferImageUsage(uint32_t bufferImageUsage)
+ {
+ VMA_ASSERT(m_BufferImageUsage == 0);
+ m_BufferImageUsage = bufferImageUsage;
+ }
- void PrintParameters(class VmaJsonWriter& json) const;
+ void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
- VkDeviceSize m_Alignment;
- VkDeviceSize m_Size;
- void* m_pUserData;
- VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
- uint32_t m_MemoryTypeIndex;
- uint8_t m_Type; // ALLOCATION_TYPE
- uint8_t m_SuballocationType; // VmaSuballocationType
- // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
- // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
- uint8_t m_MapCount;
- uint8_t m_Flags; // enum FLAGS
-
- // Allocation out of VmaDeviceMemoryBlock.
- struct BlockAllocation
- {
- VmaDeviceMemoryBlock* m_Block;
- VkDeviceSize m_Offset;
- bool m_CanBecomeLost;
- };
-
- // Allocation for an object that has its own private VkDeviceMemory.
- struct DedicatedAllocation
- {
- VkDeviceMemory m_hMemory;
- void* m_pMappedData; // Not null means memory is mapped.
- };
-
- union
- {
- // Allocation out of VmaDeviceMemoryBlock.
- BlockAllocation m_BlockAllocation;
- // Allocation for an object that has its own private VkDeviceMemory.
- DedicatedAllocation m_DedicatedAllocation;
- };
+ VkDeviceSize m_Alignment;
+ VkDeviceSize m_Size;
+ void* m_pUserData;
+ VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
+ uint32_t m_MemoryTypeIndex;
+ uint8_t m_Type; // ALLOCATION_TYPE
+ uint8_t m_SuballocationType; // VmaSuballocationType
+ // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
+	// Bits with mask 0x7F are the reference counter for vmaMapMemory()/vmaUnmapMemory().
+ uint8_t m_MapCount;
+ uint8_t m_Flags; // enum FLAGS
+
+ // Allocation out of VmaDeviceMemoryBlock.
+ struct BlockAllocation
+ {
+ VmaDeviceMemoryBlock* m_Block;
+ VkDeviceSize m_Offset;
+ bool m_CanBecomeLost;
+ };
+
+ // Allocation for an object that has its own private VkDeviceMemory.
+ struct DedicatedAllocation
+ {
+ VkDeviceMemory m_hMemory;
+ void* m_pMappedData; // Not null means memory is mapped.
+ };
+
+ union
+ {
+ // Allocation out of VmaDeviceMemoryBlock.
+ BlockAllocation m_BlockAllocation;
+ // Allocation for an object that has its own private VkDeviceMemory.
+ DedicatedAllocation m_DedicatedAllocation;
+ };
#if VMA_STATS_STRING_ENABLED
- uint32_t m_CreationFrameIndex;
- uint32_t m_BufferImageUsage; // 0 if unknown.
+ uint32_t m_CreationFrameIndex;
+ uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
- void FreeUserDataString(VmaAllocator hAllocator);
+ void FreeUserDataString(VmaAllocator hAllocator);
};
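
// A minimal sketch (hypothetical names, not VMA's code) of the m_MapCount
// byte described above: bit 0x80 flags a persistently mapped allocation,
// while bits 0x7F count vmaMapMemory()/vmaUnmapMemory() nesting.

#include <cassert>
#include <cstdint>

struct MapCountSketch
{
	static const uint8_t kPersistentFlag = 0x80;
	uint8_t value = 0; // would start as kPersistentFlag for MAPPED-created allocations

	void Map()   { assert((value & 0x7F) < 0x7F); ++value; } // bump the 7-bit refcount
	void Unmap() { assert((value & 0x7F) > 0);    --value; } // must balance Map()
	bool IsPersistentMap() const { return (value & kPersistentFlag) != 0; }
	bool IsMapped() const        { return value != 0; }      // persistent or user-mapped
};
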
/*
@@ -5663,26 +5544,26 @@ allocated memory block or free.
*/
struct VmaSuballocation
{
- VkDeviceSize offset;
- VkDeviceSize size;
- VmaAllocation hAllocation;
- VmaSuballocationType type;
+ VkDeviceSize offset;
+ VkDeviceSize size;
+ VmaAllocation hAllocation;
+ VmaSuballocationType type;
};
// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
- bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
- {
- return lhs.offset < rhs.offset;
- }
+ bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
+ {
+ return lhs.offset < rhs.offset;
+ }
};
struct VmaSuballocationOffsetGreater
{
- bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
- {
- return lhs.offset > rhs.offset;
- }
+ bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
+ {
+ return lhs.offset > rhs.offset;
+ }
};
typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
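
// Example usage of the offset comparators (a sketch with simplified types;
// SuballocSketch stands in for VmaSuballocation): a vector kept sorted with
// VmaSuballocationOffsetLess supports O(log n) lookup of the suballocation at
// or after a given offset.

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

struct SuballocSketch { uint64_t offset; uint64_t size; };

struct OffsetLessSketch
{
	bool operator()(const SuballocSketch& lhs, const SuballocSketch& rhs) const
	{
		return lhs.offset < rhs.offset;
	}
};

static size_t FindAtOrAfterOffset(const std::vector<SuballocSketch>& sortedByOffset, uint64_t offset)
{
	const SuballocSketch key = { offset, 0 };
	const auto it = std::lower_bound(sortedByOffset.begin(), sortedByOffset.end(), key, OffsetLessSketch());
	return static_cast<size_t>(it - sortedByOffset.begin()); // == size() if none found
}
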
@@ -5692,11 +5573,11 @@ static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
enum class VmaAllocationRequestType
{
- Normal,
- // Used by "Linear" algorithm.
- UpperAddress,
- EndOf1st,
- EndOf2nd,
+ Normal,
+ // Used by "Linear" algorithm.
+ UpperAddress,
+ EndOf1st,
+ EndOf2nd,
};
/*
@@ -5714,18 +5595,18 @@ If canMakeOtherLost was true:
*/
struct VmaAllocationRequest
{
- VkDeviceSize offset;
- VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
- VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
- VmaSuballocationList::iterator item;
- size_t itemsToMakeLostCount;
- void* customData;
- VmaAllocationRequestType type;
-
- VkDeviceSize CalcCost() const
- {
- return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
- }
+ VkDeviceSize offset;
+ VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
+ VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
+ VmaSuballocationList::iterator item;
+ size_t itemsToMakeLostCount;
+ void* customData;
+ VmaAllocationRequestType type;
+
+ VkDeviceSize CalcCost() const
+ {
+ return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
+ }
};
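
// A worked example of CalcCost() above: with VMA_LOST_ALLOCATION_COST equal
// to 1048576, evicting even one small allocation dominates the cost, so a
// request that fits entirely into free space always compares cheaper.
// CalcCostSketch is a hypothetical stand-in, not the member function itself.

#include <cstddef>
#include <cstdint>

static uint64_t CalcCostSketch(uint64_t sumItemSize, size_t itemsToMakeLostCount)
{
	const uint64_t kLostCost = 1048576; // mirrors VMA_LOST_ALLOCATION_COST
	return sumItemSize + itemsToMakeLostCount * kLostCost;
}

// CalcCostSketch(4096, 1) == 1052672, CalcCostSketch(0, 0) == 0: the second
// request wins, steering the allocator away from making allocations lost.
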
/*
@@ -5735,188 +5616,188 @@ in a single VkDeviceMemory block.
class VmaBlockMetadata
{
public:
- VmaBlockMetadata(VmaAllocator hAllocator);
- virtual ~VmaBlockMetadata() { }
- virtual void Init(VkDeviceSize size) { m_Size = size; }
-
- // Validates all data structures inside this object. If not valid, returns false.
- virtual bool Validate() const = 0;
- VkDeviceSize GetSize() const { return m_Size; }
- virtual size_t GetAllocationCount() const = 0;
- virtual VkDeviceSize GetSumFreeSize() const = 0;
- virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
- // Returns true if this block is empty - contains only single free suballocation.
- virtual bool IsEmpty() const = 0;
-
- virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
- // Shouldn't modify blockCount.
- virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
+ VmaBlockMetadata(VmaAllocator hAllocator);
+ virtual ~VmaBlockMetadata() { }
+ virtual void Init(VkDeviceSize size) { m_Size = size; }
+
+ // Validates all data structures inside this object. If not valid, returns false.
+ virtual bool Validate() const = 0;
+ VkDeviceSize GetSize() const { return m_Size; }
+ virtual size_t GetAllocationCount() const = 0;
+ virtual VkDeviceSize GetSumFreeSize() const = 0;
+ virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
+	// Returns true if this block is empty - contains only a single free suballocation.
+ virtual bool IsEmpty() const = 0;
+
+ virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
+ // Shouldn't modify blockCount.
+ virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
#if VMA_STATS_STRING_ENABLED
- virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
+ virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif
- // Tries to find a place for suballocation with given parameters inside this block.
- // If succeeded, fills pAllocationRequest and returns true.
- // If failed, returns false.
- virtual bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest) = 0;
-
- virtual bool MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest) = 0;
-
- virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
-
- virtual VkResult CheckCorruption(const void* pBlockData) = 0;
-
- // Makes actual allocation based on request. Request must already be checked and valid.
- virtual void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation) = 0;
-
- // Frees suballocation assigned to given memory region.
- virtual void Free(const VmaAllocation allocation) = 0;
- virtual void FreeAtOffset(VkDeviceSize offset) = 0;
+	// Tries to find a place for a suballocation with the given parameters inside this block.
+	// On success, fills pAllocationRequest and returns true.
+	// On failure, returns false.
+ virtual bool CreateAllocationRequest(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ bool upperAddress,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest) = 0;
+
+ virtual bool MakeRequestedAllocationsLost(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest) = 0;
+
+ virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
+
+ virtual VkResult CheckCorruption(const void* pBlockData) = 0;
+
+ // Makes actual allocation based on request. Request must already be checked and valid.
+ virtual void Alloc(
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation) = 0;
+
+ // Frees suballocation assigned to given memory region.
+ virtual void Free(const VmaAllocation allocation) = 0;
+ virtual void FreeAtOffset(VkDeviceSize offset) = 0;
protected:
- const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }
+ const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }
#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMap_Begin(class VmaJsonWriter& json,
- VkDeviceSize unusedBytes,
- size_t allocationCount,
- size_t unusedRangeCount) const;
- void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
- VkDeviceSize offset,
- VmaAllocation hAllocation) const;
- void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
- VkDeviceSize offset,
- VkDeviceSize size) const;
- void PrintDetailedMap_End(class VmaJsonWriter& json) const;
+ void PrintDetailedMap_Begin(class VmaJsonWriter& json,
+ VkDeviceSize unusedBytes,
+ size_t allocationCount,
+ size_t unusedRangeCount) const;
+ void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
+ VkDeviceSize offset,
+ VmaAllocation hAllocation) const;
+ void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
+ VkDeviceSize offset,
+ VkDeviceSize size) const;
+ void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif
private:
- VkDeviceSize m_Size;
- const VkAllocationCallbacks* m_pAllocationCallbacks;
+ VkDeviceSize m_Size;
+ const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
- VMA_ASSERT(0 && "Validation failed: " #cond); \
- return false; \
- } } while(false)
+ VMA_ASSERT(0 && "Validation failed: " #cond); \
+ return false; \
+ } } while(false)
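
// A usage sketch of VMA_VALIDATE (hypothetical validator, not code from this
// file): each failed condition asserts in debug builds and makes the function
// return false, so a Validate() implementation reports corruption to its
// caller instead of continuing with broken invariants.

static bool ValidateCountsSketch(size_t freeCount, size_t suballocCount,
		VkDeviceSize sumFreeSize, VkDeviceSize blockSize)
{
	VMA_VALIDATE(freeCount <= suballocCount); // free items are a subset of all items
	VMA_VALIDATE(sumFreeSize <= blockSize);   // free bytes cannot exceed the block size
	return true;                              // every invariant held
}
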
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
- VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
+ VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
- VmaBlockMetadata_Generic(VmaAllocator hAllocator);
- virtual ~VmaBlockMetadata_Generic();
- virtual void Init(VkDeviceSize size);
+ VmaBlockMetadata_Generic(VmaAllocator hAllocator);
+ virtual ~VmaBlockMetadata_Generic();
+ virtual void Init(VkDeviceSize size);
- virtual bool Validate() const;
- virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
- virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
- virtual VkDeviceSize GetUnusedRangeSizeMax() const;
- virtual bool IsEmpty() const;
+ virtual bool Validate() const;
+ virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
+ virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
+ virtual VkDeviceSize GetUnusedRangeSizeMax() const;
+ virtual bool IsEmpty() const;
- virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
- virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
+ virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
+ virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
#if VMA_STATS_STRING_ENABLED
- virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
+ virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif
- virtual bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest);
-
- virtual bool MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest);
-
- virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
-
- virtual VkResult CheckCorruption(const void* pBlockData);
-
- virtual void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation);
-
- virtual void Free(const VmaAllocation allocation);
- virtual void FreeAtOffset(VkDeviceSize offset);
-
- ////////////////////////////////////////////////////////////////////////////////
- // For defragmentation
-
- bool IsBufferImageGranularityConflictPossible(
- VkDeviceSize bufferImageGranularity,
- VmaSuballocationType& inOutPrevSuballocType) const;
+ virtual bool CreateAllocationRequest(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ bool upperAddress,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest);
+
+ virtual bool MakeRequestedAllocationsLost(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest);
+
+ virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+
+ virtual VkResult CheckCorruption(const void* pBlockData);
+
+ virtual void Alloc(
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation);
+
+ virtual void Free(const VmaAllocation allocation);
+ virtual void FreeAtOffset(VkDeviceSize offset);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // For defragmentation
+
+ bool IsBufferImageGranularityConflictPossible(
+ VkDeviceSize bufferImageGranularity,
+ VmaSuballocationType& inOutPrevSuballocType) const;
private:
- friend class VmaDefragmentationAlgorithm_Generic;
- friend class VmaDefragmentationAlgorithm_Fast;
-
- uint32_t m_FreeCount;
- VkDeviceSize m_SumFreeSize;
- VmaSuballocationList m_Suballocations;
- // Suballocations that are free and have size greater than certain threshold.
- // Sorted by size, ascending.
- VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
-
- bool ValidateFreeSuballocationList() const;
-
- // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
- // If yes, fills pOffset and returns true. If no, returns false.
- bool CheckAllocation(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- VmaSuballocationList::const_iterator suballocItem,
- bool canMakeOtherLost,
- VkDeviceSize* pOffset,
- size_t* itemsToMakeLostCount,
- VkDeviceSize* pSumFreeSize,
- VkDeviceSize* pSumItemSize) const;
- // Given free suballocation, it merges it with following one, which must also be free.
- void MergeFreeWithNext(VmaSuballocationList::iterator item);
- // Releases given suballocation, making it free.
- // Merges it with adjacent free suballocations if applicable.
- // Returns iterator to new free suballocation at this place.
- VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
- // Given free suballocation, it inserts it into sorted list of
- // m_FreeSuballocationsBySize if it's suitable.
- void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
- // Given free suballocation, it removes it from sorted list of
- // m_FreeSuballocationsBySize if it's suitable.
- void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
+ friend class VmaDefragmentationAlgorithm_Generic;
+ friend class VmaDefragmentationAlgorithm_Fast;
+
+ uint32_t m_FreeCount;
+ VkDeviceSize m_SumFreeSize;
+ VmaSuballocationList m_Suballocations;
+	// Suballocations that are free and have a size greater than a certain threshold.
+	// Sorted by size, ascending.
+ VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
+
+ bool ValidateFreeSuballocationList() const;
+
+	// Checks whether a requested suballocation with the given parameters can be placed in the given free item (suballocItem).
+	// If yes, fills pOffset and returns true; if not, returns false.
+ bool CheckAllocation(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ VmaSuballocationList::const_iterator suballocItem,
+ bool canMakeOtherLost,
+ VkDeviceSize* pOffset,
+ size_t* itemsToMakeLostCount,
+ VkDeviceSize* pSumFreeSize,
+ VkDeviceSize* pSumItemSize) const;
+	// Given a free suballocation, merges it with the following one, which must also be free.
+	void MergeFreeWithNext(VmaSuballocationList::iterator item);
+	// Releases the given suballocation, making it free.
+	// Merges it with adjacent free suballocations if applicable.
+	// Returns an iterator to the new free suballocation at this place.
+	VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
+	// Given a free suballocation, inserts it into the sorted list
+	// m_FreeSuballocationsBySize if it is large enough to qualify.
+	void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
+	// Given a free suballocation, removes it from the sorted list
+	// m_FreeSuballocationsBySize if it was large enough to qualify.
+	void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
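
// A sketch (simplified to raw sizes, not VMA's code) of why
// m_FreeSuballocationsBySize is kept sorted by size ascending: the first
// entry with size >= allocSize is the tightest free range that still fits,
// which makes a best-fit lookup a single binary search.

#include <algorithm>
#include <cstdint>
#include <vector>

static size_t FindBestFitSketch(const std::vector<uint64_t>& freeSizesAscending, uint64_t allocSize)
{
	const auto it = std::lower_bound(freeSizesAscending.begin(), freeSizesAscending.end(), allocSize);
	if (it == freeSizesAscending.end())
		return SIZE_MAX; // no free range is large enough
	return static_cast<size_t>(it - freeSizesAscending.begin()); // tightest sufficient range
}
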
/*
@@ -5924,196 +5805,196 @@ Allocations and their references in internal data structure look like this:
if(m_2ndVectorMode == SECOND_VECTOR_EMPTY):
- 0 +-------+
- | |
- | |
- | |
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount]
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount + 1]
- +-------+
- | ... |
- +-------+
- | Alloc | 1st[1st.size() - 1]
- +-------+
- | |
- | |
- | |
+ 0 +-------+
+ | |
+ | |
+ | |
+ +-------+
+ | Alloc | 1st[m_1stNullItemsBeginCount]
+ +-------+
+ | Alloc | 1st[m_1stNullItemsBeginCount + 1]
+ +-------+
+ | ... |
+ +-------+
+ | Alloc | 1st[1st.size() - 1]
+ +-------+
+ | |
+ | |
+ | |
GetSize() +-------+
if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER):
- 0 +-------+
- | Alloc | 2nd[0]
- +-------+
- | Alloc | 2nd[1]
- +-------+
- | ... |
- +-------+
- | Alloc | 2nd[2nd.size() - 1]
- +-------+
- | |
- | |
- | |
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount]
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount + 1]
- +-------+
- | ... |
- +-------+
- | Alloc | 1st[1st.size() - 1]
- +-------+
- | |
+ 0 +-------+
+ | Alloc | 2nd[0]
+ +-------+
+ | Alloc | 2nd[1]
+ +-------+
+ | ... |
+ +-------+
+ | Alloc | 2nd[2nd.size() - 1]
+ +-------+
+ | |
+ | |
+ | |
+ +-------+
+ | Alloc | 1st[m_1stNullItemsBeginCount]
+ +-------+
+ | Alloc | 1st[m_1stNullItemsBeginCount + 1]
+ +-------+
+ | ... |
+ +-------+
+ | Alloc | 1st[1st.size() - 1]
+ +-------+
+ | |
GetSize() +-------+
if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK):
- 0 +-------+
- | |
- | |
- | |
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount]
- +-------+
- | Alloc | 1st[m_1stNullItemsBeginCount + 1]
- +-------+
- | ... |
- +-------+
- | Alloc | 1st[1st.size() - 1]
- +-------+
- | |
- | |
- | |
- +-------+
- | Alloc | 2nd[2nd.size() - 1]
- +-------+
- | ... |
- +-------+
- | Alloc | 2nd[1]
- +-------+
- | Alloc | 2nd[0]
+ 0 +-------+
+ | |
+ | |
+ | |
+ +-------+
+ | Alloc | 1st[m_1stNullItemsBeginCount]
+ +-------+
+ | Alloc | 1st[m_1stNullItemsBeginCount + 1]
+ +-------+
+ | ... |
+ +-------+
+ | Alloc | 1st[1st.size() - 1]
+ +-------+
+ | |
+ | |
+ | |
+ +-------+
+ | Alloc | 2nd[2nd.size() - 1]
+ +-------+
+ | ... |
+ +-------+
+ | Alloc | 2nd[1]
+ +-------+
+ | Alloc | 2nd[0]
GetSize() +-------+
*/
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
- VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
+ VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
- VmaBlockMetadata_Linear(VmaAllocator hAllocator);
- virtual ~VmaBlockMetadata_Linear();
- virtual void Init(VkDeviceSize size);
+ VmaBlockMetadata_Linear(VmaAllocator hAllocator);
+ virtual ~VmaBlockMetadata_Linear();
+ virtual void Init(VkDeviceSize size);
- virtual bool Validate() const;
- virtual size_t GetAllocationCount() const;
- virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
- virtual VkDeviceSize GetUnusedRangeSizeMax() const;
- virtual bool IsEmpty() const { return GetAllocationCount() == 0; }
+ virtual bool Validate() const;
+ virtual size_t GetAllocationCount() const;
+ virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
+ virtual VkDeviceSize GetUnusedRangeSizeMax() const;
+ virtual bool IsEmpty() const { return GetAllocationCount() == 0; }
- virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
- virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
+ virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
+ virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
#if VMA_STATS_STRING_ENABLED
- virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
+ virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif
- virtual bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest);
+ virtual bool CreateAllocationRequest(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ bool upperAddress,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest);
- virtual bool MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest);
+ virtual bool MakeRequestedAllocationsLost(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest);
- virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+ virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
- virtual VkResult CheckCorruption(const void* pBlockData);
+ virtual VkResult CheckCorruption(const void* pBlockData);
- virtual void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation);
+ virtual void Alloc(
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation);
- virtual void Free(const VmaAllocation allocation);
- virtual void FreeAtOffset(VkDeviceSize offset);
+ virtual void Free(const VmaAllocation allocation);
+ virtual void FreeAtOffset(VkDeviceSize offset);
private:
- /*
- There are two suballocation vectors, used in ping-pong way.
- The one with index m_1stVectorIndex is called 1st.
- The one with index (m_1stVectorIndex ^ 1) is called 2nd.
- 2nd can be non-empty only when 1st is not empty.
- When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
- */
- typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
-
- enum SECOND_VECTOR_MODE
- {
- SECOND_VECTOR_EMPTY,
- /*
- Suballocations in 2nd vector are created later than the ones in 1st, but they
- all have smaller offset.
- */
- SECOND_VECTOR_RING_BUFFER,
- /*
- Suballocations in 2nd vector are upper side of double stack.
- They all have offsets higher than those in 1st vector.
- Top of this stack means smaller offsets, but higher indices in this vector.
- */
- SECOND_VECTOR_DOUBLE_STACK,
- };
-
- VkDeviceSize m_SumFreeSize;
- SuballocationVectorType m_Suballocations0, m_Suballocations1;
- uint32_t m_1stVectorIndex;
- SECOND_VECTOR_MODE m_2ndVectorMode;
-
- SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
- SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
- const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
- const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
-
- // Number of items in 1st vector with hAllocation = null at the beginning.
- size_t m_1stNullItemsBeginCount;
- // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
- size_t m_1stNullItemsMiddleCount;
- // Number of items in 2nd vector with hAllocation = null.
- size_t m_2ndNullItemsCount;
-
- bool ShouldCompact1st() const;
- void CleanupAfterFree();
-
- bool CreateAllocationRequest_LowerAddress(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest);
- bool CreateAllocationRequest_UpperAddress(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest);
+ /*
+	There are two suballocation vectors, used in a ping-pong fashion.
+ The one with index m_1stVectorIndex is called 1st.
+ The one with index (m_1stVectorIndex ^ 1) is called 2nd.
+ 2nd can be non-empty only when 1st is not empty.
+ When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
+ */
+ typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
+
+ enum SECOND_VECTOR_MODE
+ {
+ SECOND_VECTOR_EMPTY,
+ /*
+	Suballocations in the 2nd vector are created later than the ones in the 1st, but they
+	all have smaller offsets.
+ */
+ SECOND_VECTOR_RING_BUFFER,
+ /*
+	Suballocations in the 2nd vector form the upper side of a double stack.
+	They all have offsets higher than those in the 1st vector.
+	The top of this stack means smaller offsets, but higher indices in this vector.
+ */
+ SECOND_VECTOR_DOUBLE_STACK,
+ };
+
+ VkDeviceSize m_SumFreeSize;
+ SuballocationVectorType m_Suballocations0, m_Suballocations1;
+ uint32_t m_1stVectorIndex;
+ SECOND_VECTOR_MODE m_2ndVectorMode;
+
+ SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
+ SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
+ const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
+ const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
+
+ // Number of items in 1st vector with hAllocation = null at the beginning.
+ size_t m_1stNullItemsBeginCount;
+ // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
+ size_t m_1stNullItemsMiddleCount;
+ // Number of items in 2nd vector with hAllocation = null.
+ size_t m_2ndNullItemsCount;
+
+ bool ShouldCompact1st() const;
+ void CleanupAfterFree();
+
+ bool CreateAllocationRequest_LowerAddress(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest);
+ bool CreateAllocationRequest_UpperAddress(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest);
};
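
// A sketch of the ring-buffer placement drawn above (an illustration that
// ignores alignment, buffer-image granularity, lost allocations and the
// double-stack mode; names are hypothetical). New allocations append at the
// end of 1st while they fit; otherwise they wrap into 2nd, which grows from
// offset 0 toward the first live item of 1st.

#include <cstdint>

struct LinearStateSketch
{
	uint64_t blockSize;   // GetSize()
	uint64_t firstBegin;  // offset of 1st[m_1stNullItemsBeginCount]
	uint64_t firstEnd;    // end offset of 1st[1st.size() - 1]
	uint64_t secondEnd;   // end offset of 2nd[2nd.size() - 1], 0 while 2nd is empty
};

static bool PlaceRingBufferSketch(const LinearStateSketch& s, uint64_t allocSize, uint64_t* pOffset)
{
	if (s.firstEnd + allocSize <= s.blockSize)   { *pOffset = s.firstEnd;  return true; } // end of 1st
	if (s.secondEnd + allocSize <= s.firstBegin) { *pOffset = s.secondEnd; return true; } // wrapped into 2nd
	return false; // this request does not fit in ring-buffer mode
}
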
/*
@@ -6129,137 +6010,137 @@ m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
- VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
+ VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
- VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
- virtual ~VmaBlockMetadata_Buddy();
- virtual void Init(VkDeviceSize size);
+ VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
+ virtual ~VmaBlockMetadata_Buddy();
+ virtual void Init(VkDeviceSize size);
- virtual bool Validate() const;
- virtual size_t GetAllocationCount() const { return m_AllocationCount; }
- virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
- virtual VkDeviceSize GetUnusedRangeSizeMax() const;
- virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }
+ virtual bool Validate() const;
+ virtual size_t GetAllocationCount() const { return m_AllocationCount; }
+ virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
+ virtual VkDeviceSize GetUnusedRangeSizeMax() const;
+ virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }
- virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
- virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
+ virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
+ virtual void AddPoolStats(VmaPoolStats& inoutStats) const;
#if VMA_STATS_STRING_ENABLED
- virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
+ virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif
- virtual bool CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest);
+ virtual bool CreateAllocationRequest(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ bool upperAddress,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest);
- virtual bool MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest);
+ virtual bool MakeRequestedAllocationsLost(
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest);
- virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
+ virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
- virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }
+ virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }
- virtual void Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation);
+ virtual void Alloc(
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation);
- virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
- virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
+ virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
+ virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
private:
- static const VkDeviceSize MIN_NODE_SIZE = 32;
- static const size_t MAX_LEVELS = 30;
-
- struct ValidationContext
- {
- size_t calculatedAllocationCount;
- size_t calculatedFreeCount;
- VkDeviceSize calculatedSumFreeSize;
-
- ValidationContext() :
- calculatedAllocationCount(0),
- calculatedFreeCount(0),
- calculatedSumFreeSize(0) { }
- };
-
- struct Node
- {
- VkDeviceSize offset;
- enum TYPE
- {
- TYPE_FREE,
- TYPE_ALLOCATION,
- TYPE_SPLIT,
- TYPE_COUNT
- } type;
- Node* parent;
- Node* buddy;
-
- union
- {
- struct
- {
- Node* prev;
- Node* next;
- } free;
- struct
- {
- VmaAllocation alloc;
- } allocation;
- struct
- {
- Node* leftChild;
- } split;
- };
- };
-
- // Size of the memory block aligned down to a power of two.
- VkDeviceSize m_UsableSize;
- uint32_t m_LevelCount;
-
- Node* m_Root;
- struct {
- Node* front;
- Node* back;
- } m_FreeList[MAX_LEVELS];
- // Number of nodes in the tree with type == TYPE_ALLOCATION.
- size_t m_AllocationCount;
- // Number of nodes in the tree with type == TYPE_FREE.
- size_t m_FreeCount;
- // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
- VkDeviceSize m_SumFreeSize;
-
- VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
- void DeleteNode(Node* node);
- bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
- uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
- inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
- // Alloc passed just for validation. Can be null.
- void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
- void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
- // Adds node to the front of FreeList at given level.
- // node->type must be FREE.
- // node->free.prev, next can be undefined.
- void AddToFreeListFront(uint32_t level, Node* node);
- // Removes node from FreeList at given level.
- // node->type must be FREE.
- // node->free.prev, next stay untouched.
- void RemoveFromFreeList(uint32_t level, Node* node);
+ static const VkDeviceSize MIN_NODE_SIZE = 32;
+ static const size_t MAX_LEVELS = 30;
+
+ struct ValidationContext
+ {
+ size_t calculatedAllocationCount;
+ size_t calculatedFreeCount;
+ VkDeviceSize calculatedSumFreeSize;
+
+ ValidationContext() :
+ calculatedAllocationCount(0),
+ calculatedFreeCount(0),
+ calculatedSumFreeSize(0) { }
+ };
+
+ struct Node
+ {
+ VkDeviceSize offset;
+ enum TYPE
+ {
+ TYPE_FREE,
+ TYPE_ALLOCATION,
+ TYPE_SPLIT,
+ TYPE_COUNT
+ } type;
+ Node* parent;
+ Node* buddy;
+
+ union
+ {
+ struct
+ {
+ Node* prev;
+ Node* next;
+ } free;
+ struct
+ {
+ VmaAllocation alloc;
+ } allocation;
+ struct
+ {
+ Node* leftChild;
+ } split;
+ };
+ };
+
+ // Size of the memory block aligned down to a power of two.
+ VkDeviceSize m_UsableSize;
+ uint32_t m_LevelCount;
+
+ Node* m_Root;
+ struct {
+ Node* front;
+ Node* back;
+ } m_FreeList[MAX_LEVELS];
+ // Number of nodes in the tree with type == TYPE_ALLOCATION.
+ size_t m_AllocationCount;
+ // Number of nodes in the tree with type == TYPE_FREE.
+ size_t m_FreeCount;
+ // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
+ VkDeviceSize m_SumFreeSize;
+
+ VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
+ void DeleteNode(Node* node);
+ bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
+ uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
+ inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
+ // Alloc passed just for validation. Can be null.
+ void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
+ void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
+ // Adds node to the front of FreeList at given level.
+ // node->type must be FREE.
+ // node->free.prev, next can be undefined.
+ void AddToFreeListFront(uint32_t level, Node* node);
+ // Removes node from FreeList at given level.
+ // node->type must be FREE.
+ // node->free.prev, next stay untouched.
+ void RemoveFromFreeList(uint32_t level, Node* node);
#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
+ void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
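
// A sketch of the buddy level arithmetic (hypothetical helper, mirroring
// LevelToNodeSize(level) == m_UsableSize >> level): the level chosen for an
// allocation is the deepest one whose node size still holds the request.

#include <cstdint>

static uint32_t AllocSizeToLevelSketch(uint64_t usableSize, uint32_t levelCount, uint64_t allocSize)
{
	uint32_t level = levelCount - 1;         // deepest level has the smallest nodes
	uint64_t nodeSize = usableSize >> level; // == LevelToNodeSize(level)
	while (level > 0 && nodeSize < allocSize)
	{
		nodeSize <<= 1; // parent nodes are twice the size
		--level;
	}
	return level; // e.g. usableSize = 1024, allocSize = 100 -> level with 128-byte nodes
}
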
@@ -6271,95 +6152,92 @@ Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
- VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
+ VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
- VmaBlockMetadata* m_pMetadata;
-
- VmaDeviceMemoryBlock(VmaAllocator hAllocator);
-
- ~VmaDeviceMemoryBlock()
- {
- VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
- VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
- }
-
- // Always call after construction.
- void Init(
- VmaAllocator hAllocator,
- VmaPool hParentPool,
- uint32_t newMemoryTypeIndex,
- VkDeviceMemory newMemory,
- VkDeviceSize newSize,
- uint32_t id,
- uint32_t algorithm);
- // Always call before destruction.
- void Destroy(VmaAllocator allocator);
-
- VmaPool GetParentPool() const { return m_hParentPool; }
- VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
- uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
- uint32_t GetId() const { return m_Id; }
- void* GetMappedData() const { return m_pMappedData; }
-
- // Validates all data structures inside this object. If not valid, returns false.
- bool Validate() const;
-
- VkResult CheckCorruption(VmaAllocator hAllocator);
-
- // ppData can be null.
- VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
- void Unmap(VmaAllocator hAllocator, uint32_t count);
-
- VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
- VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
-
- VkResult BindBufferMemory(
- const VmaAllocator hAllocator,
- const VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkBuffer hBuffer,
- const void* pNext);
- VkResult BindImageMemory(
- const VmaAllocator hAllocator,
- const VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkImage hImage,
- const void* pNext);
+ VmaBlockMetadata* m_pMetadata;
+
+ VmaDeviceMemoryBlock(VmaAllocator hAllocator);
+
+ ~VmaDeviceMemoryBlock()
+ {
+ VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
+ VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+ }
+
+ // Always call after construction.
+ void Init(
+ VmaAllocator hAllocator,
+ VmaPool hParentPool,
+ uint32_t newMemoryTypeIndex,
+ VkDeviceMemory newMemory,
+ VkDeviceSize newSize,
+ uint32_t id,
+ uint32_t algorithm);
+ // Always call before destruction.
+ void Destroy(VmaAllocator allocator);
+
+ VmaPool GetParentPool() const { return m_hParentPool; }
+ VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
+ uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
+ uint32_t GetId() const { return m_Id; }
+ void* GetMappedData() const { return m_pMappedData; }
+
+ // Validates all data structures inside this object. If not valid, returns false.
+ bool Validate() const;
+
+ VkResult CheckCorruption(VmaAllocator hAllocator);
+
+ // ppData can be null.
+ VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
+ void Unmap(VmaAllocator hAllocator, uint32_t count);
+
+ VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
+ VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
+
+ VkResult BindBufferMemory(
+ const VmaAllocator hAllocator,
+ const VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer hBuffer,
+ const void* pNext);
+ VkResult BindImageMemory(
+ const VmaAllocator hAllocator,
+ const VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage hImage,
+ const void* pNext);
private:
- VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
- uint32_t m_MemoryTypeIndex;
- uint32_t m_Id;
- VkDeviceMemory m_hMemory;
-
- /*
- Protects access to m_hMemory so it's not used by multiple threads simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
- Also protects m_MapCount, m_pMappedData.
- Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
- */
- VMA_MUTEX m_Mutex;
- uint32_t m_MapCount;
- void* m_pMappedData;
+	VmaPool m_hParentPool; // VK_NULL_HANDLE if it does not belong to a custom pool.
+ uint32_t m_MemoryTypeIndex;
+ uint32_t m_Id;
+ VkDeviceMemory m_hMemory;
+
+	/*
+	Protects access to m_hMemory so it is not used by multiple threads simultaneously, e.g. by vkMapMemory, vkBindBufferMemory.
+	Also protects m_MapCount and m_pMappedData.
+	Allocations, deallocations, and any change in m_pMetadata are protected by the parent's VmaBlockVector::m_Mutex.
+	*/
+ VMA_MUTEX m_Mutex;
+ uint32_t m_MapCount;
+ void* m_pMappedData;
};
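
// A sketch of the ref-counted mapping that Map()/Unmap() above implement
// (hypothetical stand-ins; the real code calls vkMapMemory/vkUnmapMemory and
// uses VMA_MUTEX): the block is mapped once on the 0 -> nonzero transition
// and unmapped only when the counter returns to zero, so persistent maps and
// user maps share a single mapping.

#include <cassert>
#include <cstdint>
#include <mutex>

static char g_FakeMappingSketch[1]; // stands in for the pointer vkMapMemory returns

struct MappedBlockSketch
{
	std::mutex mutex;    // stands in for VMA_MUTEX m_Mutex
	uint32_t mapCount = 0;
	void* mappedData = nullptr;

	void* Map(uint32_t count)
	{
		std::lock_guard<std::mutex> lock(mutex);
		if (mapCount == 0)
			mappedData = g_FakeMappingSketch; // real code: vkMapMemory(...)
		mapCount += count;
		return mappedData;
	}

	void Unmap(uint32_t count)
	{
		std::lock_guard<std::mutex> lock(mutex);
		assert(mapCount >= count && "Unbalanced Unmap.");
		mapCount -= count;
		if (mapCount == 0)
			mappedData = nullptr; // real code: vkUnmapMemory(...)
	}
};
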
struct VmaPointerLess
{
- bool operator()(const void* lhs, const void* rhs) const
- {
- return lhs < rhs;
- }
+ bool operator()(const void* lhs, const void* rhs) const
+ {
+ return lhs < rhs;
+ }
};
struct VmaDefragmentationMove
{
- size_t srcBlockIndex;
- size_t dstBlockIndex;
- VkDeviceSize srcOffset;
- VkDeviceSize dstOffset;
- VkDeviceSize size;
- VmaAllocation hAllocation;
- VmaDeviceMemoryBlock* pSrcBlock;
- VmaDeviceMemoryBlock* pDstBlock;
+ size_t srcBlockIndex;
+ size_t dstBlockIndex;
+ VkDeviceSize srcOffset;
+ VkDeviceSize dstOffset;
+ VkDeviceSize size;
};
class VmaDefragmentationAlgorithm;
@@ -6372,184 +6250,176 @@ Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
- VMA_CLASS_NO_COPY(VmaBlockVector)
+ VMA_CLASS_NO_COPY(VmaBlockVector)
public:
- VmaBlockVector(
- VmaAllocator hAllocator,
- VmaPool hParentPool,
- uint32_t memoryTypeIndex,
- VkDeviceSize preferredBlockSize,
- size_t minBlockCount,
- size_t maxBlockCount,
- VkDeviceSize bufferImageGranularity,
- uint32_t frameInUseCount,
- bool explicitBlockSize,
- uint32_t algorithm);
- ~VmaBlockVector();
-
- VkResult CreateMinBlocks();
-
- VmaAllocator GetAllocator() const { return m_hAllocator; }
- VmaPool GetParentPool() const { return m_hParentPool; }
- bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
- uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
- VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
- VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
- uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
- uint32_t GetAlgorithm() const { return m_Algorithm; }
-
- void GetPoolStats(VmaPoolStats* pStats);
-
- bool IsEmpty();
- bool IsCorruptionDetectionEnabled() const;
-
- VkResult Allocate(
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
- void Free(const VmaAllocation hAllocation);
-
- // Adds statistics of this BlockVector to pStats.
- void AddStats(VmaStats* pStats);
+ VmaBlockVector(
+ VmaAllocator hAllocator,
+ VmaPool hParentPool,
+ uint32_t memoryTypeIndex,
+ VkDeviceSize preferredBlockSize,
+ size_t minBlockCount,
+ size_t maxBlockCount,
+ VkDeviceSize bufferImageGranularity,
+ uint32_t frameInUseCount,
+ bool explicitBlockSize,
+ uint32_t algorithm);
+ ~VmaBlockVector();
+
+ VkResult CreateMinBlocks();
+
+ VmaAllocator GetAllocator() const { return m_hAllocator; }
+ VmaPool GetParentPool() const { return m_hParentPool; }
+ bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
+ uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
+ VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
+ VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
+ uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
+ uint32_t GetAlgorithm() const { return m_Algorithm; }
+
+ void GetPoolStats(VmaPoolStats* pStats);
+
+ bool IsEmpty();
+ bool IsCorruptionDetectionEnabled() const;
+
+ VkResult Allocate(
+ uint32_t currentFrameIndex,
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaSuballocationType suballocType,
+ size_t allocationCount,
+ VmaAllocation* pAllocations);
+
+ void Free(const VmaAllocation hAllocation);
+
+ // Adds statistics of this BlockVector to pStats.
+ void AddStats(VmaStats* pStats);
#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMap(class VmaJsonWriter& json);
+ void PrintDetailedMap(class VmaJsonWriter& json);
#endif
- void MakePoolAllocationsLost(
- uint32_t currentFrameIndex,
- size_t* pLostAllocationCount);
- VkResult CheckCorruption();
-
- // Saves results in pCtx->res.
- void Defragment(
- class VmaBlockVectorDefragmentationContext* pCtx,
- VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
- VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
- VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
- VkCommandBuffer commandBuffer);
- void DefragmentationEnd(
- class VmaBlockVectorDefragmentationContext* pCtx,
- VmaDefragmentationStats* pStats);
-
- uint32_t ProcessDefragmentations(
- class VmaBlockVectorDefragmentationContext *pCtx,
- VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves);
-
- void CommitDefragmentations(
- class VmaBlockVectorDefragmentationContext *pCtx,
- VmaDefragmentationStats* pStats);
-
- ////////////////////////////////////////////////////////////////////////////////
- // To be used only while the m_Mutex is locked. Used during defragmentation.
-
- size_t GetBlockCount() const { return m_Blocks.size(); }
- VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
- size_t CalcAllocationCount() const;
- bool IsBufferImageGranularityConflictPossible() const;
+ void MakePoolAllocationsLost(
+ uint32_t currentFrameIndex,
+ size_t* pLostAllocationCount);
+ VkResult CheckCorruption();
+
+ // Saves results in pCtx->res.
+ void Defragment(
+ class VmaBlockVectorDefragmentationContext* pCtx,
+ VmaDefragmentationStats* pStats,
+ VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
+ VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
+ VkCommandBuffer commandBuffer);
+ void DefragmentationEnd(
+ class VmaBlockVectorDefragmentationContext* pCtx,
+ VmaDefragmentationStats* pStats);
+
+ ////////////////////////////////////////////////////////////////////////////////
+ // To be used only while the m_Mutex is locked. Used during defragmentation.
+
+ size_t GetBlockCount() const { return m_Blocks.size(); }
+ VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
+ size_t CalcAllocationCount() const;
+ bool IsBufferImageGranularityConflictPossible() const;
private:
- friend class VmaDefragmentationAlgorithm_Generic;
-
- const VmaAllocator m_hAllocator;
- const VmaPool m_hParentPool;
- const uint32_t m_MemoryTypeIndex;
- const VkDeviceSize m_PreferredBlockSize;
- const size_t m_MinBlockCount;
- const size_t m_MaxBlockCount;
- const VkDeviceSize m_BufferImageGranularity;
- const uint32_t m_FrameInUseCount;
- const bool m_ExplicitBlockSize;
- const uint32_t m_Algorithm;
- VMA_RW_MUTEX m_Mutex;
-
- /* There can be at most one allocation that is completely empty (except when minBlockCount > 0) -
- a hysteresis to avoid pessimistic case of alternating creation and destruction of a VkDeviceMemory. */
- bool m_HasEmptyBlock;
- // Incrementally sorted by sumFreeSize, ascending.
- VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
- uint32_t m_NextBlockId;
-
- VkDeviceSize CalcMaxBlockSize() const;
-
- // Finds and removes given block from vector.
- void Remove(VmaDeviceMemoryBlock* pBlock);
-
- // Performs single step in sorting m_Blocks. They may not be fully sorted
- // after this call.
- void IncrementallySortBlocks();
-
- VkResult AllocatePage(
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- VmaAllocation* pAllocation);
-
- // To be used only without CAN_MAKE_OTHER_LOST flag.
- VkResult AllocateFromBlock(
- VmaDeviceMemoryBlock* pBlock,
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- VmaAllocationCreateFlags allocFlags,
- void* pUserData,
- VmaSuballocationType suballocType,
- uint32_t strategy,
- VmaAllocation* pAllocation);
-
- VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
-
- // Saves result to pCtx->res.
- void ApplyDefragmentationMovesCpu(
- class VmaBlockVectorDefragmentationContext* pDefragCtx,
- const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
- // Saves result to pCtx->res.
- void ApplyDefragmentationMovesGpu(
- class VmaBlockVectorDefragmentationContext* pDefragCtx,
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkCommandBuffer commandBuffer);
-
- /*
- Used during defragmentation. pDefragmentationStats is optional. It's in/out
- - updated with new data.
- */
- void FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats);
-
- void UpdateHasEmptyBlock();
+ friend class VmaDefragmentationAlgorithm_Generic;
+
+ const VmaAllocator m_hAllocator;
+ const VmaPool m_hParentPool;
+ const uint32_t m_MemoryTypeIndex;
+ const VkDeviceSize m_PreferredBlockSize;
+ const size_t m_MinBlockCount;
+ const size_t m_MaxBlockCount;
+ const VkDeviceSize m_BufferImageGranularity;
+ const uint32_t m_FrameInUseCount;
+ const bool m_ExplicitBlockSize;
+ const uint32_t m_Algorithm;
+ VMA_RW_MUTEX m_Mutex;
+
+	/* There can be at most one block that is completely empty (except when minBlockCount > 0) -
+	a hysteresis to avoid the pessimistic case of alternating creation and destruction of a VkDeviceMemory. */
+ bool m_HasEmptyBlock;
+ // Incrementally sorted by sumFreeSize, ascending.
+ VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
+ uint32_t m_NextBlockId;
+
+ VkDeviceSize CalcMaxBlockSize() const;
+
+ // Finds and removes given block from vector.
+ void Remove(VmaDeviceMemoryBlock* pBlock);
+
+ // Performs single step in sorting m_Blocks. They may not be fully sorted
+ // after this call.
+ void IncrementallySortBlocks();
+
+ VkResult AllocatePage(
+ uint32_t currentFrameIndex,
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaSuballocationType suballocType,
+ VmaAllocation* pAllocation);
+
+ // To be used only without CAN_MAKE_OTHER_LOST flag.
+ VkResult AllocateFromBlock(
+ VmaDeviceMemoryBlock* pBlock,
+ uint32_t currentFrameIndex,
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ VmaAllocationCreateFlags allocFlags,
+ void* pUserData,
+ VmaSuballocationType suballocType,
+ uint32_t strategy,
+ VmaAllocation* pAllocation);
+
+ VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
+
+ // Saves result to pCtx->res.
+ void ApplyDefragmentationMovesCpu(
+ class VmaBlockVectorDefragmentationContext* pDefragCtx,
+ const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
+ // Saves result to pCtx->res.
+ void ApplyDefragmentationMovesGpu(
+ class VmaBlockVectorDefragmentationContext* pDefragCtx,
+ const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkCommandBuffer commandBuffer);
+
+ /*
+    Used during defragmentation. pDefragmentationStats is optional; if provided,
+    it is treated as in/out and updated with new data.
+ */
+ void FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats);
+
+ void UpdateHasEmptyBlock();
};
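
The IncrementallySortBlocks() declaration above relies on a cheap amortized sort: one bubble-sort step per call, so m_Blocks converges toward ascending sumFreeSize without paying for a full sort on every allocation. A minimal standalone sketch of that idea, assuming a plain std::vector of block pointers and a caller-supplied sumFreeSize accessor (both stand-ins, not library names):

    #include <cstddef>
    #include <utility>
    #include <vector>

    // One sorting step: swap the first adjacent out-of-order pair, then stop.
    // Repeated calls gradually restore ascending order by sumFreeSize.
    template<typename Block, typename SumFreeSizeFn>
    void IncrementalSortStep(std::vector<Block*>& blocks, SumFreeSizeFn sumFreeSize)
    {
        for(size_t i = 1; i < blocks.size(); ++i)
        {
            if(sumFreeSize(blocks[i - 1]) > sumFreeSize(blocks[i]))
            {
                std::swap(blocks[i - 1], blocks[i]);
                return; // Single step per call; the vector may remain partially sorted.
            }
        }
    }
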
struct VmaPool_T
{
- VMA_CLASS_NO_COPY(VmaPool_T)
+ VMA_CLASS_NO_COPY(VmaPool_T)
public:
- VmaBlockVector m_BlockVector;
+ VmaBlockVector m_BlockVector;
- VmaPool_T(
- VmaAllocator hAllocator,
- const VmaPoolCreateInfo& createInfo,
- VkDeviceSize preferredBlockSize);
- ~VmaPool_T();
+ VmaPool_T(
+ VmaAllocator hAllocator,
+ const VmaPoolCreateInfo& createInfo,
+ VkDeviceSize preferredBlockSize);
+ ~VmaPool_T();
- uint32_t GetId() const { return m_Id; }
- void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
+ uint32_t GetId() const { return m_Id; }
+ void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
- const char* GetName() const { return m_Name; }
- void SetName(const char* pName);
+ const char* GetName() const { return m_Name; }
+ void SetName(const char* pName);
#if VMA_STATS_STRING_ENABLED
- //void PrintDetailedMap(class VmaStringBuilder& sb);
+ //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif
private:
- uint32_t m_Id;
- char* m_Name;
+ uint32_t m_Id;
+ char* m_Name;
};
/*
@@ -6561,439 +6431,422 @@ Performs defragmentation:
*/
class VmaDefragmentationAlgorithm
{
- VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
+ VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
- VmaDefragmentationAlgorithm(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex) :
- m_hAllocator(hAllocator),
- m_pBlockVector(pBlockVector),
- m_CurrentFrameIndex(currentFrameIndex)
- {
- }
- virtual ~VmaDefragmentationAlgorithm()
- {
- }
-
- virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
- virtual void AddAll() = 0;
-
- virtual VkResult Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove,
- VmaDefragmentationFlags flags) = 0;
-
- virtual VkDeviceSize GetBytesMoved() const = 0;
- virtual uint32_t GetAllocationsMoved() const = 0;
+ VmaDefragmentationAlgorithm(
+ VmaAllocator hAllocator,
+ VmaBlockVector* pBlockVector,
+ uint32_t currentFrameIndex) :
+ m_hAllocator(hAllocator),
+ m_pBlockVector(pBlockVector),
+ m_CurrentFrameIndex(currentFrameIndex)
+ {
+ }
+ virtual ~VmaDefragmentationAlgorithm()
+ {
+ }
+
+ virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
+ virtual void AddAll() = 0;
+
+ virtual VkResult Defragment(
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkDeviceSize maxBytesToMove,
+ uint32_t maxAllocationsToMove) = 0;
+
+ virtual VkDeviceSize GetBytesMoved() const = 0;
+ virtual uint32_t GetAllocationsMoved() const = 0;
protected:
- VmaAllocator const m_hAllocator;
- VmaBlockVector* const m_pBlockVector;
- const uint32_t m_CurrentFrameIndex;
-
- struct AllocationInfo
- {
- VmaAllocation m_hAllocation;
- VkBool32* m_pChanged;
-
- AllocationInfo() :
- m_hAllocation(VK_NULL_HANDLE),
- m_pChanged(VMA_NULL)
- {
- }
- AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
- m_hAllocation(hAlloc),
- m_pChanged(pChanged)
- {
- }
- };
+ VmaAllocator const m_hAllocator;
+ VmaBlockVector* const m_pBlockVector;
+ const uint32_t m_CurrentFrameIndex;
+
+ struct AllocationInfo
+ {
+ VmaAllocation m_hAllocation;
+ VkBool32* m_pChanged;
+
+ AllocationInfo() :
+ m_hAllocation(VK_NULL_HANDLE),
+ m_pChanged(VMA_NULL)
+ {
+ }
+ AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
+ m_hAllocation(hAlloc),
+ m_pChanged(pChanged)
+ {
+ }
+ };
};
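
For orientation, this is how the abstract interface above is meant to be driven by its owner: register allocations (or everything), run Defragment() against byte and count budgets, then read the counters. RunAlgorithmExample and the limit values are illustrative, not part of the library:

    VkResult RunAlgorithmExample(
        VmaDefragmentationAlgorithm* pAlgorithm, // a concrete _Generic or _Fast instance
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& outMoves)
    {
        // Either feed selected allocations via AddAllocation(hAlloc, &changed),
        // or take every allocation in the block vector:
        pAlgorithm->AddAll();

        // Budgets bound how much work a single call may perform.
        const VkDeviceSize maxBytesToMove = 64ull * 1024 * 1024;
        const uint32_t maxAllocationsToMove = 128;
        const VkResult res = pAlgorithm->Defragment(outMoves, maxBytesToMove, maxAllocationsToMove);

        // The counters are valid regardless of the result code.
        const VkDeviceSize bytesMoved = pAlgorithm->GetBytesMoved();
        const uint32_t allocationsMoved = pAlgorithm->GetAllocationsMoved();
        (void)bytesMoved;
        (void)allocationsMoved;
        return res;
    }
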
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
- VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
+ VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
- VmaDefragmentationAlgorithm_Generic(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex,
- bool overlappingMoveSupported);
- virtual ~VmaDefragmentationAlgorithm_Generic();
+ VmaDefragmentationAlgorithm_Generic(
+ VmaAllocator hAllocator,
+ VmaBlockVector* pBlockVector,
+ uint32_t currentFrameIndex,
+ bool overlappingMoveSupported);
+ virtual ~VmaDefragmentationAlgorithm_Generic();
- virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
- virtual void AddAll() { m_AllAllocations = true; }
+ virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
+ virtual void AddAll() { m_AllAllocations = true; }
- virtual VkResult Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove,
- VmaDefragmentationFlags flags);
+ virtual VkResult Defragment(
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkDeviceSize maxBytesToMove,
+ uint32_t maxAllocationsToMove);
- virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
- virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
+ virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
+ virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
private:
- uint32_t m_AllocationCount;
- bool m_AllAllocations;
-
- VkDeviceSize m_BytesMoved;
- uint32_t m_AllocationsMoved;
-
- struct AllocationInfoSizeGreater
- {
- bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
- {
- return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
- }
- };
-
- struct AllocationInfoOffsetGreater
- {
- bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
- {
- return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
- }
- };
-
- struct BlockInfo
- {
- size_t m_OriginalBlockIndex;
- VmaDeviceMemoryBlock* m_pBlock;
- bool m_HasNonMovableAllocations;
- VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
-
- BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
- m_OriginalBlockIndex(SIZE_MAX),
- m_pBlock(VMA_NULL),
- m_HasNonMovableAllocations(true),
- m_Allocations(pAllocationCallbacks)
- {
- }
-
- void CalcHasNonMovableAllocations()
- {
- const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
- const size_t defragmentAllocCount = m_Allocations.size();
- m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
- }
-
- void SortAllocationsBySizeDescending()
- {
- VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
- }
-
- void SortAllocationsByOffsetDescending()
- {
- VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
- }
- };
-
- struct BlockPointerLess
- {
- bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
- {
- return pLhsBlockInfo->m_pBlock < pRhsBlock;
- }
- bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
- {
- return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
- }
- };
-
- // 1. Blocks with some non-movable allocations go first.
- // 2. Blocks with smaller sumFreeSize go first.
- struct BlockInfoCompareMoveDestination
- {
- bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
- {
- if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
- {
- return true;
- }
- if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
- {
- return false;
- }
- if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
- {
- return true;
- }
- return false;
- }
- };
-
- typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
- BlockInfoVector m_Blocks;
-
- VkResult DefragmentRound(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove,
- bool freeOldAllocations);
-
- size_t CalcBlocksWithNonMovableCount() const;
-
- static bool MoveMakesSense(
- size_t dstBlockIndex, VkDeviceSize dstOffset,
- size_t srcBlockIndex, VkDeviceSize srcOffset);
+ uint32_t m_AllocationCount;
+ bool m_AllAllocations;
+
+ VkDeviceSize m_BytesMoved;
+ uint32_t m_AllocationsMoved;
+
+ struct AllocationInfoSizeGreater
+ {
+ bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
+ {
+ return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
+ }
+ };
+
+ struct AllocationInfoOffsetGreater
+ {
+ bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
+ {
+ return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
+ }
+ };
+
+ struct BlockInfo
+ {
+ size_t m_OriginalBlockIndex;
+ VmaDeviceMemoryBlock* m_pBlock;
+ bool m_HasNonMovableAllocations;
+ VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
+
+ BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
+ m_OriginalBlockIndex(SIZE_MAX),
+ m_pBlock(VMA_NULL),
+ m_HasNonMovableAllocations(true),
+ m_Allocations(pAllocationCallbacks)
+ {
+ }
+
+ void CalcHasNonMovableAllocations()
+ {
+ const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
+ const size_t defragmentAllocCount = m_Allocations.size();
+ m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
+ }
+
+ void SortAllocationsBySizeDescending()
+ {
+ VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
+ }
+
+ void SortAllocationsByOffsetDescending()
+ {
+ VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
+ }
+ };
+
+ struct BlockPointerLess
+ {
+ bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
+ {
+ return pLhsBlockInfo->m_pBlock < pRhsBlock;
+ }
+ bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
+ {
+ return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
+ }
+ };
+
+ // 1. Blocks with some non-movable allocations go first.
+ // 2. Blocks with smaller sumFreeSize go first.
+ struct BlockInfoCompareMoveDestination
+ {
+ bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
+ {
+ if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
+ {
+ return true;
+ }
+ if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
+ {
+ return false;
+ }
+ if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
+ {
+ return true;
+ }
+ return false;
+ }
+ };
+
+ typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
+ BlockInfoVector m_Blocks;
+
+ VkResult DefragmentRound(
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkDeviceSize maxBytesToMove,
+ uint32_t maxAllocationsToMove);
+
+ size_t CalcBlocksWithNonMovableCount() const;
+
+ static bool MoveMakesSense(
+ size_t dstBlockIndex, VkDeviceSize dstOffset,
+ size_t srcBlockIndex, VkDeviceSize srcOffset);
};
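
The two ordering rules in BlockInfoCompareMoveDestination amount to: fill blocks that are already pinned by non-movable allocations first, and among those prefer the fullest (smallest sumFreeSize), so data migrates toward blocks that cannot be emptied anyway. A self-contained restatement over a hypothetical POD, usable with std::sort or VMA_SORT:

    struct DstCandidateExample
    {
        bool hasNonMovableAllocations;
        unsigned long long sumFreeSize;
    };

    // Strict weak ordering equivalent to BlockInfoCompareMoveDestination above.
    inline bool BetterMoveDestination(const DstCandidateExample& lhs, const DstCandidateExample& rhs)
    {
        if(lhs.hasNonMovableAllocations != rhs.hasNonMovableAllocations)
        {
            return lhs.hasNonMovableAllocations; // 1. Pinned blocks go first.
        }
        return lhs.sumFreeSize < rhs.sumFreeSize; // 2. Smaller free size goes first.
    }
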
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
- VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
+ VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
- VmaDefragmentationAlgorithm_Fast(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex,
- bool overlappingMoveSupported);
- virtual ~VmaDefragmentationAlgorithm_Fast();
+ VmaDefragmentationAlgorithm_Fast(
+ VmaAllocator hAllocator,
+ VmaBlockVector* pBlockVector,
+ uint32_t currentFrameIndex,
+ bool overlappingMoveSupported);
+ virtual ~VmaDefragmentationAlgorithm_Fast();
- virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
- virtual void AddAll() { m_AllAllocations = true; }
+ virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
+ virtual void AddAll() { m_AllAllocations = true; }
- virtual VkResult Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove,
- VmaDefragmentationFlags flags);
+ virtual VkResult Defragment(
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkDeviceSize maxBytesToMove,
+ uint32_t maxAllocationsToMove);
- virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
- virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
+ virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
+ virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }
private:
- struct BlockInfo
- {
- size_t origBlockIndex;
- };
-
- class FreeSpaceDatabase
- {
- public:
- FreeSpaceDatabase()
- {
- FreeSpace s = {};
- s.blockInfoIndex = SIZE_MAX;
- for(size_t i = 0; i < MAX_COUNT; ++i)
- {
- m_FreeSpaces[i] = s;
- }
- }
-
- void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
- {
- if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- return;
- }
-
- // Find first invalid or the smallest structure.
- size_t bestIndex = SIZE_MAX;
- for(size_t i = 0; i < MAX_COUNT; ++i)
- {
- // Empty structure.
- if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
- {
- bestIndex = i;
- break;
- }
- if(m_FreeSpaces[i].size < size &&
- (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
- {
- bestIndex = i;
- }
- }
-
- if(bestIndex != SIZE_MAX)
- {
- m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
- m_FreeSpaces[bestIndex].offset = offset;
- m_FreeSpaces[bestIndex].size = size;
- }
- }
-
- bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
- size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
- {
- size_t bestIndex = SIZE_MAX;
- VkDeviceSize bestFreeSpaceAfter = 0;
- for(size_t i = 0; i < MAX_COUNT; ++i)
- {
- // Structure is valid.
- if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
- {
- const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
- // Allocation fits into this structure.
- if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
- {
- const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
- (dstOffset + size);
- if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
- {
- bestIndex = i;
- bestFreeSpaceAfter = freeSpaceAfter;
- }
- }
- }
- }
-
- if(bestIndex != SIZE_MAX)
- {
- outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
- outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
-
- if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- // Leave this structure for remaining empty space.
- const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
- m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
- m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
- }
- else
- {
- // This structure becomes invalid.
- m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
- }
-
- return true;
- }
-
- return false;
- }
-
- private:
- static const size_t MAX_COUNT = 4;
-
- struct FreeSpace
- {
- size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
- VkDeviceSize offset;
- VkDeviceSize size;
- } m_FreeSpaces[MAX_COUNT];
- };
-
- const bool m_OverlappingMoveSupported;
-
- uint32_t m_AllocationCount;
- bool m_AllAllocations;
-
- VkDeviceSize m_BytesMoved;
- uint32_t m_AllocationsMoved;
-
- VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
-
- void PreprocessMetadata();
- void PostprocessMetadata();
- void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
+ struct BlockInfo
+ {
+ size_t origBlockIndex;
+ };
+
+ class FreeSpaceDatabase
+ {
+ public:
+ FreeSpaceDatabase()
+ {
+ FreeSpace s = {};
+ s.blockInfoIndex = SIZE_MAX;
+ for(size_t i = 0; i < MAX_COUNT; ++i)
+ {
+ m_FreeSpaces[i] = s;
+ }
+ }
+
+ void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
+ {
+ if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ {
+ return;
+ }
+
+ // Find first invalid or the smallest structure.
+ size_t bestIndex = SIZE_MAX;
+ for(size_t i = 0; i < MAX_COUNT; ++i)
+ {
+ // Empty structure.
+ if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
+ {
+ bestIndex = i;
+ break;
+ }
+ if(m_FreeSpaces[i].size < size &&
+ (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
+ {
+ bestIndex = i;
+ }
+ }
+
+ if(bestIndex != SIZE_MAX)
+ {
+ m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
+ m_FreeSpaces[bestIndex].offset = offset;
+ m_FreeSpaces[bestIndex].size = size;
+ }
+ }
+
+ bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
+ size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
+ {
+ size_t bestIndex = SIZE_MAX;
+ VkDeviceSize bestFreeSpaceAfter = 0;
+ for(size_t i = 0; i < MAX_COUNT; ++i)
+ {
+ // Structure is valid.
+ if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
+ {
+ const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
+ // Allocation fits into this structure.
+ if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
+ {
+ const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
+ (dstOffset + size);
+ if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
+ {
+ bestIndex = i;
+ bestFreeSpaceAfter = freeSpaceAfter;
+ }
+ }
+ }
+ }
+
+ if(bestIndex != SIZE_MAX)
+ {
+ outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
+ outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
+
+ if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ {
+ // Leave this structure for remaining empty space.
+ const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
+ m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
+ m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
+ }
+ else
+ {
+ // This structure becomes invalid.
+ m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
+ }
+
+ return true;
+ }
+
+ return false;
+ }
+
+ private:
+ static const size_t MAX_COUNT = 4;
+
+ struct FreeSpace
+ {
+ size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
+ VkDeviceSize offset;
+ VkDeviceSize size;
+ } m_FreeSpaces[MAX_COUNT];
+ };
+
+ const bool m_OverlappingMoveSupported;
+
+ uint32_t m_AllocationCount;
+ bool m_AllAllocations;
+
+ VkDeviceSize m_BytesMoved;
+ uint32_t m_AllocationsMoved;
+
+ VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
+
+ void PreprocessMetadata();
+ void PostprocessMetadata();
+ void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
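
The core of FreeSpaceDatabase::Fetch() is a single alignment test: a free range [offset, offset + size) can host a request if the aligned start still leaves room for the requested size. A standalone sketch of that test (AlignUpExample stands in for VmaAlignUp and assumes a power-of-two alignment, as Vulkan guarantees):

    #include <cstdint>

    inline uint64_t AlignUpExample(uint64_t value, uint64_t alignment)
    {
        return (value + alignment - 1) & ~(alignment - 1); // alignment must be a power of two
    }

    inline bool FitsInFreeRangeExample(
        uint64_t freeOffset, uint64_t freeSize,
        uint64_t requestSize, uint64_t alignment,
        uint64_t& outDstOffset)
    {
        const uint64_t dstOffset = AlignUpExample(freeOffset, alignment);
        if(dstOffset + requestSize <= freeOffset + freeSize)
        {
            // Space remaining afterwards: (freeOffset + freeSize) - (dstOffset + requestSize),
            // which Fetch() uses to decide whether the entry stays registered.
            outDstOffset = dstOffset;
            return true;
        }
        return false;
    }
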
struct VmaBlockDefragmentationContext
{
- enum BLOCK_FLAG
- {
- BLOCK_FLAG_USED = 0x00000001,
- };
- uint32_t flags;
- VkBuffer hBuffer;
+ enum BLOCK_FLAG
+ {
+ BLOCK_FLAG_USED = 0x00000001,
+ };
+ uint32_t flags;
+ VkBuffer hBuffer;
};
class VmaBlockVectorDefragmentationContext
{
- VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
+ VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
- VkResult res;
- bool mutexLocked;
- VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
- uint32_t defragmentationMovesProcessed;
- uint32_t defragmentationMovesCommitted;
- bool hasDefragmentationPlan;
-
- VmaBlockVectorDefragmentationContext(
- VmaAllocator hAllocator,
- VmaPool hCustomPool, // Optional.
- VmaBlockVector* pBlockVector,
- uint32_t currFrameIndex);
- ~VmaBlockVectorDefragmentationContext();
-
- VmaPool GetCustomPool() const { return m_hCustomPool; }
- VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
- VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }
-
- void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
- void AddAll() { m_AllAllocations = true; }
-
- void Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags);
+ VkResult res;
+ bool mutexLocked;
+ VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
+
+ VmaBlockVectorDefragmentationContext(
+ VmaAllocator hAllocator,
+ VmaPool hCustomPool, // Optional.
+ VmaBlockVector* pBlockVector,
+ uint32_t currFrameIndex);
+ ~VmaBlockVectorDefragmentationContext();
+
+ VmaPool GetCustomPool() const { return m_hCustomPool; }
+ VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
+ VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }
+
+ void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
+ void AddAll() { m_AllAllocations = true; }
+
+ void Begin(bool overlappingMoveSupported);
private:
- const VmaAllocator m_hAllocator;
- // Null if not from custom pool.
- const VmaPool m_hCustomPool;
-    // Redundant, kept for convenience so it does not have to be fetched from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
- VmaBlockVector* const m_pBlockVector;
- const uint32_t m_CurrFrameIndex;
- // Owner of this object.
- VmaDefragmentationAlgorithm* m_pAlgorithm;
-
- struct AllocInfo
- {
- VmaAllocation hAlloc;
- VkBool32* pChanged;
- };
- // Used between constructor and Begin.
- VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
- bool m_AllAllocations;
+ const VmaAllocator m_hAllocator;
+ // Null if not from custom pool.
+ const VmaPool m_hCustomPool;
+    // Redundant, kept for convenience so it does not have to be fetched from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
+ VmaBlockVector* const m_pBlockVector;
+ const uint32_t m_CurrFrameIndex;
+ // Owner of this object.
+ VmaDefragmentationAlgorithm* m_pAlgorithm;
+
+ struct AllocInfo
+ {
+ VmaAllocation hAlloc;
+ VkBool32* pChanged;
+ };
+ // Used between constructor and Begin.
+ VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
+ bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
- VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
+ VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
- VmaDefragmentationContext_T(
- VmaAllocator hAllocator,
- uint32_t currFrameIndex,
- uint32_t flags,
- VmaDefragmentationStats* pStats);
- ~VmaDefragmentationContext_T();
-
- void AddPools(uint32_t poolCount, VmaPool* pPools);
- void AddAllocations(
- uint32_t allocationCount,
- VmaAllocation* pAllocations,
- VkBool32* pAllocationsChanged);
-
- /*
- Returns:
- - `VK_SUCCESS` if succeeded and object can be destroyed immediately.
- - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
-    - Negative value if an error occurred and the object can be destroyed immediately.
- */
- VkResult Defragment(
- VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
- VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
- VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);
-
- VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
- VkResult DefragmentPassEnd();
+ VmaDefragmentationContext_T(
+ VmaAllocator hAllocator,
+ uint32_t currFrameIndex,
+ uint32_t flags,
+ VmaDefragmentationStats* pStats);
+ ~VmaDefragmentationContext_T();
+
+ void AddPools(uint32_t poolCount, VmaPool* pPools);
+ void AddAllocations(
+ uint32_t allocationCount,
+ VmaAllocation* pAllocations,
+ VkBool32* pAllocationsChanged);
+
+ /*
+ Returns:
+ - `VK_SUCCESS` if succeeded and object can be destroyed immediately.
+ - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
+    - Negative value if an error occurred and the object can be destroyed immediately.
+ */
+ VkResult Defragment(
+ VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
+ VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
+ VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats);
private:
- const VmaAllocator m_hAllocator;
- const uint32_t m_CurrFrameIndex;
- const uint32_t m_Flags;
- VmaDefragmentationStats* const m_pStats;
-
- VkDeviceSize m_MaxCpuBytesToMove;
- uint32_t m_MaxCpuAllocationsToMove;
- VkDeviceSize m_MaxGpuBytesToMove;
- uint32_t m_MaxGpuAllocationsToMove;
-
- // Owner of these objects.
- VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
- // Owner of these objects.
- VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
+ const VmaAllocator m_hAllocator;
+ const uint32_t m_CurrFrameIndex;
+ const uint32_t m_Flags;
+ VmaDefragmentationStats* const m_pStats;
+ // Owner of these objects.
+ VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
+ // Owner of these objects.
+ VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
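
The return-value contract documented in Defragment() above surfaces through the public vmaDefragmentationBegin()/vmaDefragmentationEnd() pair. A hedged usage sketch (the submit-and-wait step is elided, as it depends on the caller's command-buffer handling):

    VkResult DefragmentExample(VmaAllocator allocator, const VmaDefragmentationInfo2& info)
    {
        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(allocator, &info, &stats, &ctx);
        if(res == VK_NOT_READY)
        {
            // Succeeded, but GPU copies were recorded: submit info.commandBuffer,
            // wait for it to complete, and only then end the defragmentation.
            // ... submit and wait here ...
            res = vmaDefragmentationEnd(allocator, ctx);
        }
        else if(res == VK_SUCCESS)
        {
            // Everything finished on the CPU; the context can be ended immediately.
            res = vmaDefragmentationEnd(allocator, ctx);
        }
        // A negative result means an error occurred; the context (if any) can
        // likewise be destroyed immediately via vmaDefragmentationEnd().
        return res;
    }
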
#if VMA_RECORDING_ENABLED
@@ -7001,133 +6854,132 @@ private:
class VmaRecorder
{
public:
- VmaRecorder();
- VkResult Init(const VmaRecordSettings& settings, bool useMutex);
- void WriteConfiguration(
- const VkPhysicalDeviceProperties& devProps,
- const VkPhysicalDeviceMemoryProperties& memProps,
- uint32_t vulkanApiVersion,
- bool dedicatedAllocationExtensionEnabled,
- bool bindMemory2ExtensionEnabled,
- bool memoryBudgetExtensionEnabled,
- bool deviceCoherentMemoryExtensionEnabled);
- ~VmaRecorder();
-
- void RecordCreateAllocator(uint32_t frameIndex);
- void RecordDestroyAllocator(uint32_t frameIndex);
- void RecordCreatePool(uint32_t frameIndex,
- const VmaPoolCreateInfo& createInfo,
- VmaPool pool);
- void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
- void RecordAllocateMemory(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation);
- void RecordAllocateMemoryPages(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- const VmaAllocationCreateInfo& createInfo,
- uint64_t allocationCount,
- const VmaAllocation* pAllocations);
- void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation);
- void RecordAllocateMemoryForImage(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation);
- void RecordFreeMemory(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordFreeMemoryPages(uint32_t frameIndex,
- uint64_t allocationCount,
- const VmaAllocation* pAllocations);
- void RecordSetAllocationUserData(uint32_t frameIndex,
- VmaAllocation allocation,
- const void* pUserData);
- void RecordCreateLostAllocation(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordMapMemory(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordUnmapMemory(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordFlushAllocation(uint32_t frameIndex,
- VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
- void RecordInvalidateAllocation(uint32_t frameIndex,
- VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
- void RecordCreateBuffer(uint32_t frameIndex,
- const VkBufferCreateInfo& bufCreateInfo,
- const VmaAllocationCreateInfo& allocCreateInfo,
- VmaAllocation allocation);
- void RecordCreateImage(uint32_t frameIndex,
- const VkImageCreateInfo& imageCreateInfo,
- const VmaAllocationCreateInfo& allocCreateInfo,
- VmaAllocation allocation);
- void RecordDestroyBuffer(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordDestroyImage(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordTouchAllocation(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordGetAllocationInfo(uint32_t frameIndex,
- VmaAllocation allocation);
- void RecordMakePoolAllocationsLost(uint32_t frameIndex,
- VmaPool pool);
- void RecordDefragmentationBegin(uint32_t frameIndex,
- const VmaDefragmentationInfo2& info,
- VmaDefragmentationContext ctx);
- void RecordDefragmentationEnd(uint32_t frameIndex,
- VmaDefragmentationContext ctx);
- void RecordSetPoolName(uint32_t frameIndex,
- VmaPool pool,
- const char* name);
+ VmaRecorder();
+ VkResult Init(const VmaRecordSettings& settings, bool useMutex);
+ void WriteConfiguration(
+ const VkPhysicalDeviceProperties& devProps,
+ const VkPhysicalDeviceMemoryProperties& memProps,
+ uint32_t vulkanApiVersion,
+ bool dedicatedAllocationExtensionEnabled,
+ bool bindMemory2ExtensionEnabled,
+ bool memoryBudgetExtensionEnabled);
+ ~VmaRecorder();
+
+ void RecordCreateAllocator(uint32_t frameIndex);
+ void RecordDestroyAllocator(uint32_t frameIndex);
+ void RecordCreatePool(uint32_t frameIndex,
+ const VmaPoolCreateInfo& createInfo,
+ VmaPool pool);
+ void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
+ void RecordAllocateMemory(uint32_t frameIndex,
+ const VkMemoryRequirements& vkMemReq,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaAllocation allocation);
+ void RecordAllocateMemoryPages(uint32_t frameIndex,
+ const VkMemoryRequirements& vkMemReq,
+ const VmaAllocationCreateInfo& createInfo,
+ uint64_t allocationCount,
+ const VmaAllocation* pAllocations);
+ void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
+ const VkMemoryRequirements& vkMemReq,
+ bool requiresDedicatedAllocation,
+ bool prefersDedicatedAllocation,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaAllocation allocation);
+ void RecordAllocateMemoryForImage(uint32_t frameIndex,
+ const VkMemoryRequirements& vkMemReq,
+ bool requiresDedicatedAllocation,
+ bool prefersDedicatedAllocation,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaAllocation allocation);
+ void RecordFreeMemory(uint32_t frameIndex,
+ VmaAllocation allocation);
+ void RecordFreeMemoryPages(uint32_t frameIndex,
+ uint64_t allocationCount,
+ const VmaAllocation* pAllocations);
+ void RecordSetAllocationUserData(uint32_t frameIndex,
+ VmaAllocation allocation,
+ const void* pUserData);
+ void RecordCreateLostAllocation(uint32_t frameIndex,
+ VmaAllocation allocation);
+ void RecordMapMemory(uint32_t frameIndex,
+ VmaAllocation allocation);
+ void RecordUnmapMemory(uint32_t frameIndex,
+ VmaAllocation allocation);
+ void RecordFlushAllocation(uint32_t frameIndex,
+ VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
+ void RecordInvalidateAllocation(uint32_t frameIndex,
+ VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
+ void RecordCreateBuffer(uint32_t frameIndex,
+ const VkBufferCreateInfo& bufCreateInfo,
+ const VmaAllocationCreateInfo& allocCreateInfo,
+ VmaAllocation allocation);
+ void RecordCreateImage(uint32_t frameIndex,
+ const VkImageCreateInfo& imageCreateInfo,
+ const VmaAllocationCreateInfo& allocCreateInfo,
+ VmaAllocation allocation);
+ void RecordDestroyBuffer(uint32_t frameIndex,
+ VmaAllocation allocation);
+ void RecordDestroyImage(uint32_t frameIndex,
+ VmaAllocation allocation);
+ void RecordTouchAllocation(uint32_t frameIndex,
+ VmaAllocation allocation);
+ void RecordGetAllocationInfo(uint32_t frameIndex,
+ VmaAllocation allocation);
+ void RecordMakePoolAllocationsLost(uint32_t frameIndex,
+ VmaPool pool);
+ void RecordDefragmentationBegin(uint32_t frameIndex,
+ const VmaDefragmentationInfo2& info,
+ VmaDefragmentationContext ctx);
+ void RecordDefragmentationEnd(uint32_t frameIndex,
+ VmaDefragmentationContext ctx);
+ void RecordSetPoolName(uint32_t frameIndex,
+ VmaPool pool,
+ const char* name);
private:
- struct CallParams
- {
- uint32_t threadId;
- double time;
- };
-
- class UserDataString
- {
- public:
- UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
- const char* GetString() const { return m_Str; }
-
- private:
- char m_PtrStr[17];
- const char* m_Str;
- };
-
- bool m_UseMutex;
- VmaRecordFlags m_Flags;
- FILE* m_File;
- VMA_MUTEX m_FileMutex;
- int64_t m_Freq;
- int64_t m_StartCounter;
-
- void GetBasicParams(CallParams& outParams);
-
- // T must be a pointer type, e.g. VmaAllocation, VmaPool.
- template<typename T>
- void PrintPointerList(uint64_t count, const T* pItems)
- {
- if(count)
- {
- fprintf(m_File, "%p", pItems[0]);
- for(uint64_t i = 1; i < count; ++i)
- {
- fprintf(m_File, " %p", pItems[i]);
- }
- }
- }
-
- void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
- void Flush();
+ struct CallParams
+ {
+ uint32_t threadId;
+ double time;
+ };
+
+ class UserDataString
+ {
+ public:
+ UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
+ const char* GetString() const { return m_Str; }
+
+ private:
+ char m_PtrStr[17];
+ const char* m_Str;
+ };
+
+ bool m_UseMutex;
+ VmaRecordFlags m_Flags;
+ FILE* m_File;
+ VMA_MUTEX m_FileMutex;
+ int64_t m_Freq;
+ int64_t m_StartCounter;
+
+ void GetBasicParams(CallParams& outParams);
+
+ // T must be a pointer type, e.g. VmaAllocation, VmaPool.
+ template<typename T>
+ void PrintPointerList(uint64_t count, const T* pItems)
+ {
+ if(count)
+ {
+ fprintf(m_File, "%p", pItems[0]);
+ for(uint64_t i = 1; i < count; ++i)
+ {
+ fprintf(m_File, " %p", pItems[i]);
+ }
+ }
+ }
+
+ void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
+ void Flush();
};
#endif // #if VMA_RECORDING_ENABLED
@@ -7137,344 +6989,330 @@ Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAlloca
*/
class VmaAllocationObjectAllocator
{
- VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
+ VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
- VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);
+ VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);
- template<typename... Types> VmaAllocation Allocate(Types... args);
- void Free(VmaAllocation hAlloc);
+ VmaAllocation Allocate();
+ void Free(VmaAllocation hAlloc);
private:
- VMA_MUTEX m_Mutex;
- VmaPoolAllocator<VmaAllocation_T> m_Allocator;
+ VMA_MUTEX m_Mutex;
+ VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
- VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
- VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
+ VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
+ VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
#if VMA_MEMORY_BUDGET
- VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
- VMA_RW_MUTEX m_BudgetMutex;
- uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
- uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
- uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
+ VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
+ VMA_RW_MUTEX m_BudgetMutex;
+ uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
+ uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
+ uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET
- VmaCurrentBudgetData()
- {
- for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
- {
- m_BlockBytes[heapIndex] = 0;
- m_AllocationBytes[heapIndex] = 0;
+ VmaCurrentBudgetData()
+ {
+ for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
+ {
+ m_BlockBytes[heapIndex] = 0;
+ m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
- m_VulkanUsage[heapIndex] = 0;
- m_VulkanBudget[heapIndex] = 0;
- m_BlockBytesAtBudgetFetch[heapIndex] = 0;
+ m_VulkanUsage[heapIndex] = 0;
+ m_VulkanBudget[heapIndex] = 0;
+ m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
- }
+ }
#if VMA_MEMORY_BUDGET
- m_OperationsSinceBudgetFetch = 0;
+ m_OperationsSinceBudgetFetch = 0;
#endif
- }
+ }
- void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
- {
- m_AllocationBytes[heapIndex] += allocationSize;
+ void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
+ {
+ m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
- ++m_OperationsSinceBudgetFetch;
+ ++m_OperationsSinceBudgetFetch;
#endif
- }
+ }
- void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
- {
- VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize); // DELME
- m_AllocationBytes[heapIndex] -= allocationSize;
+ void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
+ {
+ VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize); // DELME
+ m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
- ++m_OperationsSinceBudgetFetch;
+ ++m_OperationsSinceBudgetFetch;
#endif
- }
+ }
};
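
A brief sketch of how the atomic counters above are driven around an allocation's lifetime; the heap index would come from VmaAllocator_T::MemoryTypeIndexToHeapIndex(), and TrackAllocationExample is illustrative only:

    void TrackAllocationExample(VmaCurrentBudgetData& budget, uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        budget.AddAllocation(heapIndex, allocationSize);    // after a successful allocation
        // ... the allocation lives and is used here ...
        budget.RemoveAllocation(heapIndex, allocationSize); // on free; asserts the counter never underflows
    }
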
// Main allocator object.
struct VmaAllocator_T
{
- VMA_CLASS_NO_COPY(VmaAllocator_T)
+ VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
- bool m_UseMutex;
- uint32_t m_VulkanApiVersion;
- bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
- bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
- bool m_UseExtMemoryBudget;
- bool m_UseAmdDeviceCoherentMemory;
- VkDevice m_hDevice;
- VkInstance m_hInstance;
- bool m_AllocationCallbacksSpecified;
- VkAllocationCallbacks m_AllocationCallbacks;
- VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
- VmaAllocationObjectAllocator m_AllocationObjectAllocator;
-
-    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so allocations there cannot exceed the heap size.
- uint32_t m_HeapSizeLimitMask;
-
- VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
- VkPhysicalDeviceMemoryProperties m_MemProps;
-
- // Default pools.
- VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
-
- // Each vector is sorted by memory (handle value).
- typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
- AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
- VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
-
- VmaCurrentBudgetData m_Budget;
-
- VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
- VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
- ~VmaAllocator_T();
-
- const VkAllocationCallbacks* GetAllocationCallbacks() const
- {
- return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
- }
- const VmaVulkanFunctions& GetVulkanFunctions() const
- {
- return m_VulkanFunctions;
- }
-
- VkDeviceSize GetBufferImageGranularity() const
- {
- return VMA_MAX(
- static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
- m_PhysicalDeviceProperties.limits.bufferImageGranularity);
- }
-
- uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
- uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
-
- uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
- {
- VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
- return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
- }
-    // True when the given memory type is HOST_VISIBLE but not HOST_COHERENT.
- bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
- {
- return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- }
-    // Minimum alignment for all allocations in the given memory type.
- VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
- {
- return IsMemoryTypeNonCoherent(memTypeIndex) ?
- VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
- (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
- }
-
- bool IsIntegratedGpu() const
- {
- return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
- }
-
- uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }
+ bool m_UseMutex;
+ uint32_t m_VulkanApiVersion;
+ bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
+ bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
+ bool m_UseExtMemoryBudget;
+ VkDevice m_hDevice;
+ VkInstance m_hInstance;
+ bool m_AllocationCallbacksSpecified;
+ VkAllocationCallbacks m_AllocationCallbacks;
+ VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
+ VmaAllocationObjectAllocator m_AllocationObjectAllocator;
+
+    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so allocations there cannot exceed the heap size.
+ uint32_t m_HeapSizeLimitMask;
+
+ VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
+ VkPhysicalDeviceMemoryProperties m_MemProps;
+
+ // Default pools.
+ VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
+
+ // Each vector is sorted by memory (handle value).
+ typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
+ AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
+ VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
+
+ VmaCurrentBudgetData m_Budget;
+
+ VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
+ VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
+ ~VmaAllocator_T();
+
+ const VkAllocationCallbacks* GetAllocationCallbacks() const
+ {
+ return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
+ }
+ const VmaVulkanFunctions& GetVulkanFunctions() const
+ {
+ return m_VulkanFunctions;
+ }
+
+ VkDeviceSize GetBufferImageGranularity() const
+ {
+ return VMA_MAX(
+ static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
+ m_PhysicalDeviceProperties.limits.bufferImageGranularity);
+ }
+
+ uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
+ uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }
+
+ uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
+ {
+ VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
+ return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
+ }
+    // True when the given memory type is HOST_VISIBLE but not HOST_COHERENT.
+ bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
+ {
+ return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
+ VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ }
+    // Minimum alignment for all allocations in the given memory type.
+ VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
+ {
+ return IsMemoryTypeNonCoherent(memTypeIndex) ?
+ VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
+ (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
+ }
+
+ bool IsIntegratedGpu() const
+ {
+ return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
+ }
#if VMA_RECORDING_ENABLED
- VmaRecorder* GetRecorder() const { return m_pRecorder; }
+ VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif
- void GetBufferMemoryRequirements(
- VkBuffer hBuffer,
- VkMemoryRequirements& memReq,
- bool& requiresDedicatedAllocation,
- bool& prefersDedicatedAllocation) const;
- void GetImageMemoryRequirements(
- VkImage hImage,
- VkMemoryRequirements& memReq,
- bool& requiresDedicatedAllocation,
- bool& prefersDedicatedAllocation) const;
-
- // Main allocation function.
- VkResult AllocateMemory(
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
- // Main deallocation function.
- void FreeMemory(
- size_t allocationCount,
- const VmaAllocation* pAllocations);
-
- VkResult ResizeAllocation(
- const VmaAllocation alloc,
- VkDeviceSize newSize);
-
- void CalculateStats(VmaStats* pStats);
-
- void GetBudget(
- VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
+ void GetBufferMemoryRequirements(
+ VkBuffer hBuffer,
+ VkMemoryRequirements& memReq,
+ bool& requiresDedicatedAllocation,
+ bool& prefersDedicatedAllocation) const;
+ void GetImageMemoryRequirements(
+ VkImage hImage,
+ VkMemoryRequirements& memReq,
+ bool& requiresDedicatedAllocation,
+ bool& prefersDedicatedAllocation) const;
+
+ // Main allocation function.
+ VkResult AllocateMemory(
+ const VkMemoryRequirements& vkMemReq,
+ bool requiresDedicatedAllocation,
+ bool prefersDedicatedAllocation,
+ VkBuffer dedicatedBuffer,
+ VkImage dedicatedImage,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaSuballocationType suballocType,
+ size_t allocationCount,
+ VmaAllocation* pAllocations);
+
+ // Main deallocation function.
+ void FreeMemory(
+ size_t allocationCount,
+ const VmaAllocation* pAllocations);
+
+ VkResult ResizeAllocation(
+ const VmaAllocation alloc,
+ VkDeviceSize newSize);
+
+ void CalculateStats(VmaStats* pStats);
+
+ void GetBudget(
+ VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
#if VMA_STATS_STRING_ENABLED
- void PrintDetailedMap(class VmaJsonWriter& json);
+ void PrintDetailedMap(class VmaJsonWriter& json);
#endif
- VkResult DefragmentationBegin(
- const VmaDefragmentationInfo2& info,
- VmaDefragmentationStats* pStats,
- VmaDefragmentationContext* pContext);
- VkResult DefragmentationEnd(
- VmaDefragmentationContext context);
-
- VkResult DefragmentationPassBegin(
- VmaDefragmentationPassInfo* pInfo,
- VmaDefragmentationContext context);
- VkResult DefragmentationPassEnd(
- VmaDefragmentationContext context);
-
- void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
- bool TouchAllocation(VmaAllocation hAllocation);
-
- VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
- void DestroyPool(VmaPool pool);
- void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
-
- void SetCurrentFrameIndex(uint32_t frameIndex);
- uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }
-
- void MakePoolAllocationsLost(
- VmaPool hPool,
- size_t* pLostAllocationCount);
- VkResult CheckPoolCorruption(VmaPool hPool);
- VkResult CheckCorruption(uint32_t memoryTypeBits);
-
- void CreateLostAllocation(VmaAllocation* pAllocation);
-
- // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
- VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
- // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
- void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
- // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
- VkResult BindVulkanBuffer(
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset,
- VkBuffer buffer,
- const void* pNext);
- // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
- VkResult BindVulkanImage(
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset,
- VkImage image,
- const void* pNext);
-
- VkResult Map(VmaAllocation hAllocation, void** ppData);
- void Unmap(VmaAllocation hAllocation);
-
- VkResult BindBufferMemory(
- VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkBuffer hBuffer,
- const void* pNext);
- VkResult BindImageMemory(
- VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkImage hImage,
- const void* pNext);
-
- void FlushOrInvalidateAllocation(
- VmaAllocation hAllocation,
- VkDeviceSize offset, VkDeviceSize size,
- VMA_CACHE_OPERATION op);
-
- void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);
-
- /*
-    Returns a bit mask of memory types that can support defragmentation on the GPU,
-    i.e. those that support creation of the buffer required for copy operations.
- */
- uint32_t GetGpuDefragmentationMemoryTypeBits();
+ VkResult DefragmentationBegin(
+ const VmaDefragmentationInfo2& info,
+ VmaDefragmentationStats* pStats,
+ VmaDefragmentationContext* pContext);
+ VkResult DefragmentationEnd(
+ VmaDefragmentationContext context);
+
+ void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
+ bool TouchAllocation(VmaAllocation hAllocation);
+
+ VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
+ void DestroyPool(VmaPool pool);
+ void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);
+
+ void SetCurrentFrameIndex(uint32_t frameIndex);
+ uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }
+
+ void MakePoolAllocationsLost(
+ VmaPool hPool,
+ size_t* pLostAllocationCount);
+ VkResult CheckPoolCorruption(VmaPool hPool);
+ VkResult CheckCorruption(uint32_t memoryTypeBits);
+
+ void CreateLostAllocation(VmaAllocation* pAllocation);
+
+ // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
+ VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
+ // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
+ void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
+ // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
+ VkResult BindVulkanBuffer(
+ VkDeviceMemory memory,
+ VkDeviceSize memoryOffset,
+ VkBuffer buffer,
+ const void* pNext);
+ // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
+ VkResult BindVulkanImage(
+ VkDeviceMemory memory,
+ VkDeviceSize memoryOffset,
+ VkImage image,
+ const void* pNext);
+
+ VkResult Map(VmaAllocation hAllocation, void** ppData);
+ void Unmap(VmaAllocation hAllocation);
+
+ VkResult BindBufferMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer hBuffer,
+ const void* pNext);
+ VkResult BindImageMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage hImage,
+ const void* pNext);
+
+ void FlushOrInvalidateAllocation(
+ VmaAllocation hAllocation,
+ VkDeviceSize offset, VkDeviceSize size,
+ VMA_CACHE_OPERATION op);
+
+ void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);
+
+ /*
+    Returns a bit mask of memory types that can support defragmentation on the GPU,
+    i.e. those that support creation of the buffer required for copy operations.
+ */
+ uint32_t GetGpuDefragmentationMemoryTypeBits();
private:
- VkDeviceSize m_PreferredLargeHeapBlockSize;
-
- VkPhysicalDevice m_PhysicalDevice;
- VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
- VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.
-
- VMA_RW_MUTEX m_PoolsMutex;
- // Protected by m_PoolsMutex. Sorted by pointer value.
- VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
- uint32_t m_NextPoolId;
+ VkDeviceSize m_PreferredLargeHeapBlockSize;
- VmaVulkanFunctions m_VulkanFunctions;
+ VkPhysicalDevice m_PhysicalDevice;
+ VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
+ VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.
+
+ VMA_RW_MUTEX m_PoolsMutex;
+ // Protected by m_PoolsMutex. Sorted by pointer value.
+ VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
+ uint32_t m_NextPoolId;
- // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
- uint32_t m_GlobalMemoryTypeBits;
+ VmaVulkanFunctions m_VulkanFunctions;
#if VMA_RECORDING_ENABLED
- VmaRecorder* m_pRecorder;
+ VmaRecorder* m_pRecorder;
#endif
- void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
-
- VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
-
- VkResult AllocateMemoryOfType(
- VkDeviceSize size,
- VkDeviceSize alignment,
- bool dedicatedAllocation,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- const VmaAllocationCreateInfo& createInfo,
- uint32_t memTypeIndex,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
- // Helper function only to be used inside AllocateDedicatedMemory.
- VkResult AllocateDedicatedMemoryPage(
- VkDeviceSize size,
- VmaSuballocationType suballocType,
- uint32_t memTypeIndex,
- const VkMemoryAllocateInfo& allocInfo,
- bool map,
- bool isUserDataString,
- void* pUserData,
- VmaAllocation* pAllocation);
-
- // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
- VkResult AllocateDedicatedMemory(
- VkDeviceSize size,
- VmaSuballocationType suballocType,
- uint32_t memTypeIndex,
- bool withinBudget,
- bool map,
- bool isUserDataString,
- void* pUserData,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- size_t allocationCount,
- VmaAllocation* pAllocations);
-
- void FreeDedicatedMemory(const VmaAllocation allocation);
-
- /*
-    Calculates and returns a bit mask of memory types that can support defragmentation
-    on the GPU, i.e. those that support creation of the buffer required for copy operations.
- */
- uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;
-
- uint32_t CalculateGlobalMemoryTypeBits() const;
+ void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);
+
+ VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
+
+ VkResult AllocateMemoryOfType(
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ bool dedicatedAllocation,
+ VkBuffer dedicatedBuffer,
+ VkImage dedicatedImage,
+ const VmaAllocationCreateInfo& createInfo,
+ uint32_t memTypeIndex,
+ VmaSuballocationType suballocType,
+ size_t allocationCount,
+ VmaAllocation* pAllocations);
+
+ // Helper function only to be used inside AllocateDedicatedMemory.
+ VkResult AllocateDedicatedMemoryPage(
+ VkDeviceSize size,
+ VmaSuballocationType suballocType,
+ uint32_t memTypeIndex,
+ const VkMemoryAllocateInfo& allocInfo,
+ bool map,
+ bool isUserDataString,
+ void* pUserData,
+ VmaAllocation* pAllocation);
+
+ // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
+ VkResult AllocateDedicatedMemory(
+ VkDeviceSize size,
+ VmaSuballocationType suballocType,
+ uint32_t memTypeIndex,
+ bool withinBudget,
+ bool map,
+ bool isUserDataString,
+ void* pUserData,
+ VkBuffer dedicatedBuffer,
+ VkImage dedicatedImage,
+ size_t allocationCount,
+ VmaAllocation* pAllocations);
+
+ void FreeDedicatedMemory(const VmaAllocation allocation);
+
+ /*
+    Calculates and returns a bit mask of memory types that can support defragmentation
+    on the GPU, i.e. those that support creation of the buffer required for copy operations.
+ */
+ uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;
#if VMA_MEMORY_BUDGET
- void UpdateVulkanBudget();
+ void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
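
IsMemoryTypeNonCoherent() and GetMemoryTypeMinAlignment() above encode a Vulkan rule: memory that is HOST_VISIBLE but not HOST_COHERENT requires flush/invalidate ranges, and therefore allocation offsets, aligned to nonCoherentAtomSize. A standalone restatement (MinAlignmentForTypeExample and debugAlignment, a stand-in for VMA_DEBUG_ALIGNMENT, are illustrative):

    inline VkDeviceSize MinAlignmentForTypeExample(
        const VkPhysicalDeviceMemoryProperties& memProps,
        const VkPhysicalDeviceLimits& limits,
        uint32_t memTypeIndex,
        VkDeviceSize debugAlignment)
    {
        const VkMemoryPropertyFlags flags = memProps.memoryTypes[memTypeIndex].propertyFlags;
        const bool nonCoherent =
            (flags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        // Equivalent to max(debugAlignment, nonCoherentAtomSize) for non-coherent types.
        return nonCoherent && limits.nonCoherentAtomSize > debugAlignment
            ? limits.nonCoherentAtomSize
            : debugAlignment;
    }
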
@@ -7483,45 +7321,45 @@ private:
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
- return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
+ return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}
static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
- VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
+ VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}
template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
- return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
+ return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}
template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
- return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
+ return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}
template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
- if(ptr != VMA_NULL)
- {
- ptr->~T();
- VmaFree(hAllocator, ptr);
- }
+ if(ptr != VMA_NULL)
+ {
+ ptr->~T();
+ VmaFree(hAllocator, ptr);
+ }
}
template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
- if(ptr != VMA_NULL)
- {
- for(size_t i = count; i--; )
- ptr[i].~T();
- VmaFree(hAllocator, ptr);
- }
+ if(ptr != VMA_NULL)
+ {
+ for(size_t i = count; i--; )
+ ptr[i].~T();
+ VmaFree(hAllocator, ptr);
+ }
}
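
vma_delete() and vma_delete_array() above pair raw VmaFree() with explicit destructor calls; the matching construction side combines VmaAllocate<T>() with placement new. A sketch of what such a helper could look like (vma_new_example is a hypothetical counterpart, not a name from the header):

    #include <new>      // placement new
    #include <utility>  // std::forward

    template<typename T, typename... Args>
    static T* vma_new_example(VmaAllocator hAllocator, Args&&... args)
    {
        // Allocate raw, correctly aligned storage, then construct T in place.
        return new(VmaAllocate<T>(hAllocator)) T(std::forward<Args>(args)...);
    }
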
////////////////////////////////////////////////////////////////////////////////
@@ -7532,65 +7370,65 @@ static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
class VmaStringBuilder
{
public:
- VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
- size_t GetLength() const { return m_Data.size(); }
- const char* GetData() const { return m_Data.data(); }
+ VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
+ size_t GetLength() const { return m_Data.size(); }
+ const char* GetData() const { return m_Data.data(); }
- void Add(char ch) { m_Data.push_back(ch); }
- void Add(const char* pStr);
- void AddNewLine() { Add('\n'); }
- void AddNumber(uint32_t num);
- void AddNumber(uint64_t num);
- void AddPointer(const void* ptr);
+ void Add(char ch) { m_Data.push_back(ch); }
+ void Add(const char* pStr);
+ void AddNewLine() { Add('\n'); }
+ void AddNumber(uint32_t num);
+ void AddNumber(uint64_t num);
+ void AddPointer(const void* ptr);
private:
- VmaVector< char, VmaStlAllocator<char> > m_Data;
+ VmaVector< char, VmaStlAllocator<char> > m_Data;
};
void VmaStringBuilder::Add(const char* pStr)
{
- const size_t strLen = strlen(pStr);
- if(strLen > 0)
- {
- const size_t oldCount = m_Data.size();
- m_Data.resize(oldCount + strLen);
- memcpy(m_Data.data() + oldCount, pStr, strLen);
- }
+ const size_t strLen = strlen(pStr);
+ if(strLen > 0)
+ {
+ const size_t oldCount = m_Data.size();
+ m_Data.resize(oldCount + strLen);
+ memcpy(m_Data.data() + oldCount, pStr, strLen);
+ }
}
void VmaStringBuilder::AddNumber(uint32_t num)
{
- char buf[11];
- buf[10] = '\0';
- char *p = &buf[10];
- do
- {
- *--p = '0' + (num % 10);
- num /= 10;
- }
- while(num);
- Add(p);
+ char buf[11];
+ buf[10] = '\0';
+ char *p = &buf[10];
+ do
+ {
+ *--p = '0' + (num % 10);
+ num /= 10;
+ }
+ while(num);
+ Add(p);
}
void VmaStringBuilder::AddNumber(uint64_t num)
{
- char buf[21];
- buf[20] = '\0';
- char *p = &buf[20];
- do
- {
- *--p = '0' + (num % 10);
- num /= 10;
- }
- while(num);
- Add(p);
+ char buf[21];
+ buf[20] = '\0';
+ char *p = &buf[20];
+ do
+ {
+ *--p = '0' + (num % 10);
+ num /= 10;
+ }
+ while(num);
+ Add(p);
}
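Both AddNumber() overloads use the same in-place decimal conversion: digits are produced least-significant first into the tail of a fixed buffer, so the buffer needs one char per maximum digit plus the terminator (11 for uint32_t, 21 for uint64_t). A standalone sketch of the same technique, under an illustrative name:

    #include <cstdint>
    #include <cstdio>

    // Fills 'buf' from the back; returns a pointer to the first digit.
    const char* U64ToStr(uint64_t num, char (&buf)[21])
    {
        buf[20] = '\0';
        char* p = &buf[20];
        do
        {
            *--p = '0' + (num % 10);
            num /= 10;
        } while(num);
        return p;
    }

    int main()
    {
        char buf[21];
        std::printf("%s\n", U64ToStr(18446744073709551615ull, buf)); // Max uint64_t: 20 digits.
    }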
void VmaStringBuilder::AddPointer(const void* ptr)
{
- char buf[21];
- VmaPtrToStr(buf, sizeof(buf), ptr);
- Add(buf);
+ char buf[21];
+ VmaPtrToStr(buf, sizeof(buf), ptr);
+ Add(buf);
}
#endif // #if VMA_STATS_STRING_ENABLED
@@ -7602,283 +7440,283 @@ void VmaStringBuilder::AddPointer(const void* ptr)
class VmaJsonWriter
{
- VMA_CLASS_NO_COPY(VmaJsonWriter)
+ VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
- VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
- ~VmaJsonWriter();
-
- void BeginObject(bool singleLine = false);
- void EndObject();
-
- void BeginArray(bool singleLine = false);
- void EndArray();
-
- void WriteString(const char* pStr);
- void BeginString(const char* pStr = VMA_NULL);
- void ContinueString(const char* pStr);
- void ContinueString(uint32_t n);
- void ContinueString(uint64_t n);
- void ContinueString_Pointer(const void* ptr);
- void EndString(const char* pStr = VMA_NULL);
-
- void WriteNumber(uint32_t n);
- void WriteNumber(uint64_t n);
- void WriteBool(bool b);
- void WriteNull();
+ VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
+ ~VmaJsonWriter();
+
+ void BeginObject(bool singleLine = false);
+ void EndObject();
+
+ void BeginArray(bool singleLine = false);
+ void EndArray();
+
+ void WriteString(const char* pStr);
+ void BeginString(const char* pStr = VMA_NULL);
+ void ContinueString(const char* pStr);
+ void ContinueString(uint32_t n);
+ void ContinueString(uint64_t n);
+ void ContinueString_Pointer(const void* ptr);
+ void EndString(const char* pStr = VMA_NULL);
+
+ void WriteNumber(uint32_t n);
+ void WriteNumber(uint64_t n);
+ void WriteBool(bool b);
+ void WriteNull();
private:
- static const char* const INDENT;
-
- enum COLLECTION_TYPE
- {
- COLLECTION_TYPE_OBJECT,
- COLLECTION_TYPE_ARRAY,
- };
- struct StackItem
- {
- COLLECTION_TYPE type;
- uint32_t valueCount;
- bool singleLineMode;
- };
-
- VmaStringBuilder& m_SB;
- VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
- bool m_InsideString;
-
- void BeginValue(bool isString);
- void WriteIndent(bool oneLess = false);
+ static const char* const INDENT;
+
+ enum COLLECTION_TYPE
+ {
+ COLLECTION_TYPE_OBJECT,
+ COLLECTION_TYPE_ARRAY,
+ };
+ struct StackItem
+ {
+ COLLECTION_TYPE type;
+ uint32_t valueCount;
+ bool singleLineMode;
+ };
+
+ VmaStringBuilder& m_SB;
+ VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
+ bool m_InsideString;
+
+ void BeginValue(bool isString);
+ void WriteIndent(bool oneLess = false);
};
const char* const VmaJsonWriter::INDENT = " ";
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
- m_SB(sb),
- m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
- m_InsideString(false)
+ m_SB(sb),
+ m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
+ m_InsideString(false)
{
}
VmaJsonWriter::~VmaJsonWriter()
{
- VMA_ASSERT(!m_InsideString);
- VMA_ASSERT(m_Stack.empty());
+ VMA_ASSERT(!m_InsideString);
+ VMA_ASSERT(m_Stack.empty());
}
void VmaJsonWriter::BeginObject(bool singleLine)
{
- VMA_ASSERT(!m_InsideString);
+ VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.Add('{');
+ BeginValue(false);
+ m_SB.Add('{');
- StackItem item;
- item.type = COLLECTION_TYPE_OBJECT;
- item.valueCount = 0;
- item.singleLineMode = singleLine;
- m_Stack.push_back(item);
+ StackItem item;
+ item.type = COLLECTION_TYPE_OBJECT;
+ item.valueCount = 0;
+ item.singleLineMode = singleLine;
+ m_Stack.push_back(item);
}
void VmaJsonWriter::EndObject()
{
- VMA_ASSERT(!m_InsideString);
+ VMA_ASSERT(!m_InsideString);
- WriteIndent(true);
- m_SB.Add('}');
+ WriteIndent(true);
+ m_SB.Add('}');
- VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
- m_Stack.pop_back();
+ VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
+ m_Stack.pop_back();
}
void VmaJsonWriter::BeginArray(bool singleLine)
{
- VMA_ASSERT(!m_InsideString);
+ VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.Add('[');
+ BeginValue(false);
+ m_SB.Add('[');
- StackItem item;
- item.type = COLLECTION_TYPE_ARRAY;
- item.valueCount = 0;
- item.singleLineMode = singleLine;
- m_Stack.push_back(item);
+ StackItem item;
+ item.type = COLLECTION_TYPE_ARRAY;
+ item.valueCount = 0;
+ item.singleLineMode = singleLine;
+ m_Stack.push_back(item);
}
void VmaJsonWriter::EndArray()
{
- VMA_ASSERT(!m_InsideString);
+ VMA_ASSERT(!m_InsideString);
- WriteIndent(true);
- m_SB.Add(']');
+ WriteIndent(true);
+ m_SB.Add(']');
- VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
- m_Stack.pop_back();
+ VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
+ m_Stack.pop_back();
}
void VmaJsonWriter::WriteString(const char* pStr)
{
- BeginString(pStr);
- EndString();
+ BeginString(pStr);
+ EndString();
}
void VmaJsonWriter::BeginString(const char* pStr)
{
- VMA_ASSERT(!m_InsideString);
+ VMA_ASSERT(!m_InsideString);
- BeginValue(true);
- m_SB.Add('"');
- m_InsideString = true;
- if(pStr != VMA_NULL && pStr[0] != '\0')
- {
- ContinueString(pStr);
- }
+ BeginValue(true);
+ m_SB.Add('"');
+ m_InsideString = true;
+ if(pStr != VMA_NULL && pStr[0] != '\0')
+ {
+ ContinueString(pStr);
+ }
}
void VmaJsonWriter::ContinueString(const char* pStr)
{
- VMA_ASSERT(m_InsideString);
-
- const size_t strLen = strlen(pStr);
- for(size_t i = 0; i < strLen; ++i)
- {
- char ch = pStr[i];
- if(ch == '\\')
- {
- m_SB.Add("\\\\");
- }
- else if(ch == '"')
- {
- m_SB.Add("\\\"");
- }
- else if(ch >= 32)
- {
- m_SB.Add(ch);
- }
- else switch(ch)
- {
- case '\b':
- m_SB.Add("\\b");
- break;
- case '\f':
- m_SB.Add("\\f");
- break;
- case '\n':
- m_SB.Add("\\n");
- break;
- case '\r':
- m_SB.Add("\\r");
- break;
- case '\t':
- m_SB.Add("\\t");
- break;
- default:
- VMA_ASSERT(0 && "Character not currently supported.");
- break;
- }
- }
+ VMA_ASSERT(m_InsideString);
+
+ const size_t strLen = strlen(pStr);
+ for(size_t i = 0; i < strLen; ++i)
+ {
+ char ch = pStr[i];
+ if(ch == '\\')
+ {
+ m_SB.Add("\\\\");
+ }
+ else if(ch == '"')
+ {
+ m_SB.Add("\\\"");
+ }
+ else if(ch >= 32)
+ {
+ m_SB.Add(ch);
+ }
+ else switch(ch)
+ {
+ case '\b':
+ m_SB.Add("\\b");
+ break;
+ case '\f':
+ m_SB.Add("\\f");
+ break;
+ case '\n':
+ m_SB.Add("\\n");
+ break;
+ case '\r':
+ m_SB.Add("\\r");
+ break;
+ case '\t':
+ m_SB.Add("\\t");
+ break;
+ default:
+ VMA_ASSERT(0 && "Character not currently supported.");
+ break;
+ }
+ }
}
void VmaJsonWriter::ContinueString(uint32_t n)
{
- VMA_ASSERT(m_InsideString);
- m_SB.AddNumber(n);
+ VMA_ASSERT(m_InsideString);
+ m_SB.AddNumber(n);
}
void VmaJsonWriter::ContinueString(uint64_t n)
{
- VMA_ASSERT(m_InsideString);
- m_SB.AddNumber(n);
+ VMA_ASSERT(m_InsideString);
+ m_SB.AddNumber(n);
}
void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
- VMA_ASSERT(m_InsideString);
- m_SB.AddPointer(ptr);
+ VMA_ASSERT(m_InsideString);
+ m_SB.AddPointer(ptr);
}
void VmaJsonWriter::EndString(const char* pStr)
{
- VMA_ASSERT(m_InsideString);
- if(pStr != VMA_NULL && pStr[0] != '\0')
- {
- ContinueString(pStr);
- }
- m_SB.Add('"');
- m_InsideString = false;
+ VMA_ASSERT(m_InsideString);
+ if(pStr != VMA_NULL && pStr[0] != '\0')
+ {
+ ContinueString(pStr);
+ }
+ m_SB.Add('"');
+ m_InsideString = false;
}
void VmaJsonWriter::WriteNumber(uint32_t n)
{
- VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.AddNumber(n);
+ VMA_ASSERT(!m_InsideString);
+ BeginValue(false);
+ m_SB.AddNumber(n);
}
void VmaJsonWriter::WriteNumber(uint64_t n)
{
- VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.AddNumber(n);
+ VMA_ASSERT(!m_InsideString);
+ BeginValue(false);
+ m_SB.AddNumber(n);
}
void VmaJsonWriter::WriteBool(bool b)
{
- VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.Add(b ? "true" : "false");
+ VMA_ASSERT(!m_InsideString);
+ BeginValue(false);
+ m_SB.Add(b ? "true" : "false");
}
void VmaJsonWriter::WriteNull()
{
- VMA_ASSERT(!m_InsideString);
- BeginValue(false);
- m_SB.Add("null");
+ VMA_ASSERT(!m_InsideString);
+ BeginValue(false);
+ m_SB.Add("null");
}
void VmaJsonWriter::BeginValue(bool isString)
{
- if(!m_Stack.empty())
- {
- StackItem& currItem = m_Stack.back();
- if(currItem.type == COLLECTION_TYPE_OBJECT &&
- currItem.valueCount % 2 == 0)
- {
- VMA_ASSERT(isString);
- }
-
- if(currItem.type == COLLECTION_TYPE_OBJECT &&
- currItem.valueCount % 2 != 0)
- {
- m_SB.Add(": ");
- }
- else if(currItem.valueCount > 0)
- {
- m_SB.Add(", ");
- WriteIndent();
- }
- else
- {
- WriteIndent();
- }
- ++currItem.valueCount;
- }
+ if(!m_Stack.empty())
+ {
+ StackItem& currItem = m_Stack.back();
+ if(currItem.type == COLLECTION_TYPE_OBJECT &&
+ currItem.valueCount % 2 == 0)
+ {
+ VMA_ASSERT(isString);
+ }
+
+ if(currItem.type == COLLECTION_TYPE_OBJECT &&
+ currItem.valueCount % 2 != 0)
+ {
+ m_SB.Add(": ");
+ }
+ else if(currItem.valueCount > 0)
+ {
+ m_SB.Add(", ");
+ WriteIndent();
+ }
+ else
+ {
+ WriteIndent();
+ }
+ ++currItem.valueCount;
+ }
}
void VmaJsonWriter::WriteIndent(bool oneLess)
{
- if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
- {
- m_SB.AddNewLine();
-
- size_t count = m_Stack.size();
- if(count > 0 && oneLess)
- {
- --count;
- }
- for(size_t i = 0; i < count; ++i)
- {
- m_SB.Add(INDENT);
- }
- }
+ if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
+ {
+ m_SB.AddNewLine();
+
+ size_t count = m_Stack.size();
+ if(count > 0 && oneLess)
+ {
+ --count;
+ }
+ for(size_t i = 0; i < count; ++i)
+ {
+ m_SB.Add(INDENT);
+ }
+ }
}
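Putting the writer together: m_Stack and BeginValue() enforce JSON shape. Inside an object, every even-numbered value must be a string (a key, hence the VMA_ASSERT(isString)), and every odd-numbered value gets a ": " prefix. A hedged usage sketch, assuming a valid VmaAllocator named allocator and a two-space INDENT:

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Blocks");  // Key: value count is even, must be a string.
        json.WriteNumber(2u);        // Value: prefixed with ": ".
        json.WriteString("Nested");
        json.BeginObject(true);      // singleLine = true: no newlines or indent inside.
        json.WriteString("Empty");
        json.WriteBool(false);
        json.EndObject();
        json.EndObject();            // The destructor asserts the stack is empty.
    }
    // sb.GetData() now holds:
    // {
    //   "Blocks": 2,
    //   "Nested": {"Empty": false}
    // }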
#endif // #if VMA_STATS_STRING_ENABLED
@@ -7887,359 +7725,359 @@ void VmaJsonWriter::WriteIndent(bool oneLess)
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
- if(IsUserDataString())
- {
- VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
+ if(IsUserDataString())
+ {
+ VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
- FreeUserDataString(hAllocator);
+ FreeUserDataString(hAllocator);
- if(pUserData != VMA_NULL)
- {
- m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
- }
- }
- else
- {
- m_pUserData = pUserData;
- }
+ if(pUserData != VMA_NULL)
+ {
+ m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
+ }
+ }
+ else
+ {
+ m_pUserData = pUserData;
+ }
}
void VmaAllocation_T::ChangeBlockAllocation(
- VmaAllocator hAllocator,
- VmaDeviceMemoryBlock* block,
- VkDeviceSize offset)
+ VmaAllocator hAllocator,
+ VmaDeviceMemoryBlock* block,
+ VkDeviceSize offset)
{
- VMA_ASSERT(block != VMA_NULL);
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+ VMA_ASSERT(block != VMA_NULL);
+ VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
- // Move mapping reference counter from old block to new block.
- if(block != m_BlockAllocation.m_Block)
- {
- uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
- if(IsPersistentMap())
- ++mapRefCount;
- m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
- block->Map(hAllocator, mapRefCount, VMA_NULL);
- }
+ // Move mapping reference counter from old block to new block.
+ if(block != m_BlockAllocation.m_Block)
+ {
+ uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
+ if(IsPersistentMap())
+ ++mapRefCount;
+ m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
+ block->Map(hAllocator, mapRefCount, VMA_NULL);
+ }
- m_BlockAllocation.m_Block = block;
- m_BlockAllocation.m_Offset = offset;
+ m_BlockAllocation.m_Block = block;
+ m_BlockAllocation.m_Offset = offset;
}
void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
- VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
- m_BlockAllocation.m_Offset = newOffset;
+ VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
+ m_BlockAllocation.m_Offset = newOffset;
}
VkDeviceSize VmaAllocation_T::GetOffset() const
{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- return m_BlockAllocation.m_Offset;
- case ALLOCATION_TYPE_DEDICATED:
- return 0;
- default:
- VMA_ASSERT(0);
- return 0;
- }
+ switch(m_Type)
+ {
+ case ALLOCATION_TYPE_BLOCK:
+ return m_BlockAllocation.m_Offset;
+ case ALLOCATION_TYPE_DEDICATED:
+ return 0;
+ default:
+ VMA_ASSERT(0);
+ return 0;
+ }
}
VkDeviceMemory VmaAllocation_T::GetMemory() const
{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- return m_BlockAllocation.m_Block->GetDeviceMemory();
- case ALLOCATION_TYPE_DEDICATED:
- return m_DedicatedAllocation.m_hMemory;
- default:
- VMA_ASSERT(0);
- return VK_NULL_HANDLE;
- }
+ switch(m_Type)
+ {
+ case ALLOCATION_TYPE_BLOCK:
+ return m_BlockAllocation.m_Block->GetDeviceMemory();
+ case ALLOCATION_TYPE_DEDICATED:
+ return m_DedicatedAllocation.m_hMemory;
+ default:
+ VMA_ASSERT(0);
+ return VK_NULL_HANDLE;
+ }
}
void* VmaAllocation_T::GetMappedData() const
{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- if(m_MapCount != 0)
- {
- void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
- VMA_ASSERT(pBlockData != VMA_NULL);
- return (char*)pBlockData + m_BlockAllocation.m_Offset;
- }
- else
- {
- return VMA_NULL;
- }
- break;
- case ALLOCATION_TYPE_DEDICATED:
- VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
- return m_DedicatedAllocation.m_pMappedData;
- default:
- VMA_ASSERT(0);
- return VMA_NULL;
- }
+ switch(m_Type)
+ {
+ case ALLOCATION_TYPE_BLOCK:
+ if(m_MapCount != 0)
+ {
+ void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
+ VMA_ASSERT(pBlockData != VMA_NULL);
+ return (char*)pBlockData + m_BlockAllocation.m_Offset;
+ }
+ else
+ {
+ return VMA_NULL;
+ }
+ break;
+ case ALLOCATION_TYPE_DEDICATED:
+ VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
+ return m_DedicatedAllocation.m_pMappedData;
+ default:
+ VMA_ASSERT(0);
+ return VMA_NULL;
+ }
}
bool VmaAllocation_T::CanBecomeLost() const
{
- switch(m_Type)
- {
- case ALLOCATION_TYPE_BLOCK:
- return m_BlockAllocation.m_CanBecomeLost;
- case ALLOCATION_TYPE_DEDICATED:
- return false;
- default:
- VMA_ASSERT(0);
- return false;
- }
+ switch(m_Type)
+ {
+ case ALLOCATION_TYPE_BLOCK:
+ return m_BlockAllocation.m_CanBecomeLost;
+ case ALLOCATION_TYPE_DEDICATED:
+ return false;
+ default:
+ VMA_ASSERT(0);
+ return false;
+ }
}
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
- VMA_ASSERT(CanBecomeLost());
-
- /*
- Warning: This is a carefully designed algorithm.
- Do not modify unless you really know what you're doing :)
- */
- uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
- for(;;)
- {
- if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
- {
- VMA_ASSERT(0);
- return false;
- }
- else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
- {
- return false;
- }
- else // Last use time earlier than current time.
- {
- if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
- {
- // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
- // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
- return true;
- }
- }
- }
+ VMA_ASSERT(CanBecomeLost());
+
+ /*
+ Warning: This is a carefully designed algorithm.
+ Do not modify unless you really know what you're doing :)
+ */
+ uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
+ for(;;)
+ {
+ if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+ {
+ VMA_ASSERT(0);
+ return false;
+ }
+ else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
+ {
+ return false;
+ }
+ else // Last use time earlier than current time.
+ {
+ if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
+ {
+ // Setting hAllocation.LastUseFrameIndex atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
+ // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
+ return true;
+ }
+ }
+ }
}
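The "carefully designed algorithm" above is a standard lock-free compare-and-swap retry loop. A reduced model using std::atomic (illustrative only: VMA asserts instead of returning false when the index is already lost, and the sentinel value is an assumption):

    #include <atomic>
    #include <cstdint>

    static const uint32_t FRAME_INDEX_LOST = UINT32_MAX; // Sentinel; assumed value.

    bool TryMarkLost(std::atomic<uint32_t>& lastUseFrame,
                     uint32_t currentFrame, uint32_t framesInUse)
    {
        uint32_t observed = lastUseFrame.load();
        for(;;)
        {
            if(observed == FRAME_INDEX_LOST)
                return false; // Already lost.
            if(observed + framesInUse >= currentFrame)
                return false; // Possibly still used by in-flight frames.
            // Succeeds only if no other thread changed the value since it was
            // read; on failure 'observed' is refreshed and the loop re-checks.
            if(lastUseFrame.compare_exchange_weak(observed, FRAME_INDEX_LOST))
                return true;
        }
    }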
#if VMA_STATS_STRING_ENABLED
// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
- "FREE",
- "UNKNOWN",
- "BUFFER",
- "IMAGE_UNKNOWN",
- "IMAGE_LINEAR",
- "IMAGE_OPTIMAL",
+ "FREE",
+ "UNKNOWN",
+ "BUFFER",
+ "IMAGE_UNKNOWN",
+ "IMAGE_LINEAR",
+ "IMAGE_OPTIMAL",
};
void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
- json.WriteString("Type");
- json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
+ json.WriteString("Type");
+ json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
- json.WriteString("Size");
- json.WriteNumber(m_Size);
+ json.WriteString("Size");
+ json.WriteNumber(m_Size);
- if(m_pUserData != VMA_NULL)
- {
- json.WriteString("UserData");
- if(IsUserDataString())
- {
- json.WriteString((const char*)m_pUserData);
- }
- else
- {
- json.BeginString();
- json.ContinueString_Pointer(m_pUserData);
- json.EndString();
- }
- }
+ if(m_pUserData != VMA_NULL)
+ {
+ json.WriteString("UserData");
+ if(IsUserDataString())
+ {
+ json.WriteString((const char*)m_pUserData);
+ }
+ else
+ {
+ json.BeginString();
+ json.ContinueString_Pointer(m_pUserData);
+ json.EndString();
+ }
+ }
- json.WriteString("CreationFrameIndex");
- json.WriteNumber(m_CreationFrameIndex);
+ json.WriteString("CreationFrameIndex");
+ json.WriteNumber(m_CreationFrameIndex);
- json.WriteString("LastUseFrameIndex");
- json.WriteNumber(GetLastUseFrameIndex());
+ json.WriteString("LastUseFrameIndex");
+ json.WriteNumber(GetLastUseFrameIndex());
- if(m_BufferImageUsage != 0)
- {
- json.WriteString("Usage");
- json.WriteNumber(m_BufferImageUsage);
- }
+ if(m_BufferImageUsage != 0)
+ {
+ json.WriteString("Usage");
+ json.WriteNumber(m_BufferImageUsage);
+ }
}
#endif
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
- VMA_ASSERT(IsUserDataString());
- VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
- m_pUserData = VMA_NULL;
+ VMA_ASSERT(IsUserDataString());
+ VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
+ m_pUserData = VMA_NULL;
}
void VmaAllocation_T::BlockAllocMap()
{
- VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
+ VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
- if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
- {
- ++m_MapCount;
- }
- else
- {
- VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
- }
+ if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
+ {
+ ++m_MapCount;
+ }
+ else
+ {
+ VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
+ }
}
void VmaAllocation_T::BlockAllocUnmap()
{
- VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
+ VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
- if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
- {
- --m_MapCount;
- }
- else
- {
- VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
- }
+ if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
+ {
+ --m_MapCount;
+ }
+ else
+ {
+ VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
+ }
}
VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
- VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
-
- if(m_MapCount != 0)
- {
- if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
- {
- VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
- *ppData = m_DedicatedAllocation.m_pMappedData;
- ++m_MapCount;
- return VK_SUCCESS;
- }
- else
- {
- VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
- return VK_ERROR_MEMORY_MAP_FAILED;
- }
- }
- else
- {
- VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
- hAllocator->m_hDevice,
- m_DedicatedAllocation.m_hMemory,
- 0, // offset
- VK_WHOLE_SIZE,
- 0, // flags
- ppData);
- if(result == VK_SUCCESS)
- {
- m_DedicatedAllocation.m_pMappedData = *ppData;
- m_MapCount = 1;
- }
- return result;
- }
+ VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
+
+ if(m_MapCount != 0)
+ {
+ if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
+ {
+ VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
+ *ppData = m_DedicatedAllocation.m_pMappedData;
+ ++m_MapCount;
+ return VK_SUCCESS;
+ }
+ else
+ {
+ VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
+ return VK_ERROR_MEMORY_MAP_FAILED;
+ }
+ }
+ else
+ {
+ VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
+ hAllocator->m_hDevice,
+ m_DedicatedAllocation.m_hMemory,
+ 0, // offset
+ VK_WHOLE_SIZE,
+ 0, // flags
+ ppData);
+ if(result == VK_SUCCESS)
+ {
+ m_DedicatedAllocation.m_pMappedData = *ppData;
+ m_MapCount = 1;
+ }
+ return result;
+ }
}
void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
- VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
-
- if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
- {
- --m_MapCount;
- if(m_MapCount == 0)
- {
- m_DedicatedAllocation.m_pMappedData = VMA_NULL;
- (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
- hAllocator->m_hDevice,
- m_DedicatedAllocation.m_hMemory);
- }
- }
- else
- {
- VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
- }
+ VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
+
+ if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
+ {
+ --m_MapCount;
+ if(m_MapCount == 0)
+ {
+ m_DedicatedAllocation.m_pMappedData = VMA_NULL;
+ (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
+ hAllocator->m_hDevice,
+ m_DedicatedAllocation.m_hMemory);
+ }
+ }
+ else
+ {
+ VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
+ }
}
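All four map/unmap functions above share one convention for m_MapCount, summarized here as a sketch (the flag value is an assumption, chosen to be consistent with the 0x7F cap):

    // High bit: persistent-map flag; low 7 bits: outstanding map reference count.
    static const uint32_t MAP_COUNT_FLAG_PERSISTENT_MAP_SKETCH = 0x80u; // Assumed value.

    // count   = m_MapCount & ~flag  -> current number of outstanding maps (max 0x7F)
    // persist = m_MapCount &  flag  -> allocation was created persistently mapped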
#if VMA_STATS_STRING_ENABLED
static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
- json.BeginObject();
+ json.BeginObject();
- json.WriteString("Blocks");
- json.WriteNumber(stat.blockCount);
+ json.WriteString("Blocks");
+ json.WriteNumber(stat.blockCount);
- json.WriteString("Allocations");
- json.WriteNumber(stat.allocationCount);
+ json.WriteString("Allocations");
+ json.WriteNumber(stat.allocationCount);
- json.WriteString("UnusedRanges");
- json.WriteNumber(stat.unusedRangeCount);
+ json.WriteString("UnusedRanges");
+ json.WriteNumber(stat.unusedRangeCount);
- json.WriteString("UsedBytes");
- json.WriteNumber(stat.usedBytes);
+ json.WriteString("UsedBytes");
+ json.WriteNumber(stat.usedBytes);
- json.WriteString("UnusedBytes");
- json.WriteNumber(stat.unusedBytes);
+ json.WriteString("UnusedBytes");
+ json.WriteNumber(stat.unusedBytes);
- if(stat.allocationCount > 1)
- {
- json.WriteString("AllocationSize");
- json.BeginObject(true);
- json.WriteString("Min");
- json.WriteNumber(stat.allocationSizeMin);
- json.WriteString("Avg");
- json.WriteNumber(stat.allocationSizeAvg);
- json.WriteString("Max");
- json.WriteNumber(stat.allocationSizeMax);
- json.EndObject();
- }
+ if(stat.allocationCount > 1)
+ {
+ json.WriteString("AllocationSize");
+ json.BeginObject(true);
+ json.WriteString("Min");
+ json.WriteNumber(stat.allocationSizeMin);
+ json.WriteString("Avg");
+ json.WriteNumber(stat.allocationSizeAvg);
+ json.WriteString("Max");
+ json.WriteNumber(stat.allocationSizeMax);
+ json.EndObject();
+ }
- if(stat.unusedRangeCount > 1)
- {
- json.WriteString("UnusedRangeSize");
- json.BeginObject(true);
- json.WriteString("Min");
- json.WriteNumber(stat.unusedRangeSizeMin);
- json.WriteString("Avg");
- json.WriteNumber(stat.unusedRangeSizeAvg);
- json.WriteString("Max");
- json.WriteNumber(stat.unusedRangeSizeMax);
- json.EndObject();
- }
+ if(stat.unusedRangeCount > 1)
+ {
+ json.WriteString("UnusedRangeSize");
+ json.BeginObject(true);
+ json.WriteString("Min");
+ json.WriteNumber(stat.unusedRangeSizeMin);
+ json.WriteString("Avg");
+ json.WriteNumber(stat.unusedRangeSizeAvg);
+ json.WriteString("Max");
+ json.WriteNumber(stat.unusedRangeSizeMax);
+ json.EndObject();
+ }
- json.EndObject();
+ json.EndObject();
}
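For reference, the object emitted by VmaPrintStatInfo() has the following shape; the values are hypothetical, and the AllocationSize / UnusedRangeSize sub-objects appear only when the corresponding count exceeds 1:

    // {
    //   "Blocks": 4,
    //   "Allocations": 120,
    //   "UnusedRanges": 7,
    //   "UsedBytes": 33554432,
    //   "UnusedBytes": 1048576,
    //   "AllocationSize": {"Min": 256, "Avg": 279620, "Max": 8388608},
    //   "UnusedRangeSize": {"Min": 64, "Avg": 149796, "Max": 524288}
    // }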
#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
- bool operator()(
- const VmaSuballocationList::iterator lhs,
- const VmaSuballocationList::iterator rhs) const
- {
- return lhs->size < rhs->size;
- }
- bool operator()(
- const VmaSuballocationList::iterator lhs,
- VkDeviceSize rhsSize) const
- {
- return lhs->size < rhsSize;
- }
+ bool operator()(
+ const VmaSuballocationList::iterator lhs,
+ const VmaSuballocationList::iterator rhs) const
+ {
+ return lhs->size < rhs->size;
+ }
+ bool operator()(
+ const VmaSuballocationList::iterator lhs,
+ VkDeviceSize rhsSize) const
+ {
+ return lhs->size < rhsSize;
+ }
};
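The two operator() overloads exist so one functor can both order m_FreeSuballocationsBySize (iterator vs. iterator) and act as the comparator of a lower_bound-style search that takes a plain size as the probe (iterator vs. VkDeviceSize). A reduced model with stand-in types:

    #include <algorithm>
    #include <list>
    #include <vector>

    struct Suballoc { unsigned long long size; };
    using SuballocList = std::list<Suballoc>;

    struct SuballocItemSizeLess
    {
        bool operator()(SuballocList::iterator lhs, SuballocList::iterator rhs) const
        { return lhs->size < rhs->size; }
        bool operator()(SuballocList::iterator lhs, unsigned long long rhsSize) const
        { return lhs->size < rhsSize; }
    };

    // First free item whose size is not less than 'wanted', or bySize.end().
    std::vector<SuballocList::iterator>::iterator FindFirstFit(
        std::vector<SuballocList::iterator>& bySize, unsigned long long wanted)
    {
        return std::lower_bound(bySize.begin(), bySize.end(), wanted,
            SuballocItemSizeLess());
    }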
@@ -8247,72 +8085,72 @@ struct VmaSuballocationItemSizeLess
// class VmaBlockMetadata
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
- m_Size(0),
- m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
+ m_Size(0),
+ m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
- VkDeviceSize unusedBytes,
- size_t allocationCount,
- size_t unusedRangeCount) const
+ VkDeviceSize unusedBytes,
+ size_t allocationCount,
+ size_t unusedRangeCount) const
{
- json.BeginObject();
+ json.BeginObject();
- json.WriteString("TotalBytes");
- json.WriteNumber(GetSize());
+ json.WriteString("TotalBytes");
+ json.WriteNumber(GetSize());
- json.WriteString("UnusedBytes");
- json.WriteNumber(unusedBytes);
+ json.WriteString("UnusedBytes");
+ json.WriteNumber(unusedBytes);
- json.WriteString("Allocations");
- json.WriteNumber((uint64_t)allocationCount);
+ json.WriteString("Allocations");
+ json.WriteNumber((uint64_t)allocationCount);
- json.WriteString("UnusedRanges");
- json.WriteNumber((uint64_t)unusedRangeCount);
+ json.WriteString("UnusedRanges");
+ json.WriteNumber((uint64_t)unusedRangeCount);
- json.WriteString("Suballocations");
- json.BeginArray();
+ json.WriteString("Suballocations");
+ json.BeginArray();
}
void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
- VkDeviceSize offset,
- VmaAllocation hAllocation) const
+ VkDeviceSize offset,
+ VmaAllocation hAllocation) const
{
- json.BeginObject(true);
-
- json.WriteString("Offset");
- json.WriteNumber(offset);
+ json.BeginObject(true);
+
+ json.WriteString("Offset");
+ json.WriteNumber(offset);
- hAllocation->PrintParameters(json);
+ hAllocation->PrintParameters(json);
- json.EndObject();
+ json.EndObject();
}
void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
- VkDeviceSize offset,
- VkDeviceSize size) const
+ VkDeviceSize offset,
+ VkDeviceSize size) const
{
- json.BeginObject(true);
-
- json.WriteString("Offset");
- json.WriteNumber(offset);
+ json.BeginObject(true);
+
+ json.WriteString("Offset");
+ json.WriteNumber(offset);
- json.WriteString("Type");
- json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
+ json.WriteString("Type");
+ json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
- json.WriteString("Size");
- json.WriteNumber(size);
+ json.WriteString("Size");
+ json.WriteNumber(size);
- json.EndObject();
+ json.EndObject();
}
void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
- json.EndArray();
- json.EndObject();
+ json.EndArray();
+ json.EndObject();
}
#endif // #if VMA_STATS_STRING_ENABLED
@@ -8321,11 +8159,11 @@ void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
// class VmaBlockMetadata_Generic
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
- VmaBlockMetadata(hAllocator),
- m_FreeCount(0),
- m_SumFreeSize(0),
- m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
- m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
+ VmaBlockMetadata(hAllocator),
+ m_FreeCount(0),
+ m_SumFreeSize(0),
+ m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+ m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}
@@ -8335,990 +8173,990 @@ VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
- VmaBlockMetadata::Init(size);
+ VmaBlockMetadata::Init(size);
- m_FreeCount = 1;
- m_SumFreeSize = size;
+ m_FreeCount = 1;
+ m_SumFreeSize = size;
- VmaSuballocation suballoc = {};
- suballoc.offset = 0;
- suballoc.size = size;
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
+ VmaSuballocation suballoc = {};
+ suballoc.offset = 0;
+ suballoc.size = size;
+ suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ suballoc.hAllocation = VK_NULL_HANDLE;
- VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
- m_Suballocations.push_back(suballoc);
- VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
- --suballocItem;
- m_FreeSuballocationsBySize.push_back(suballocItem);
+ VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
+ m_Suballocations.push_back(suballoc);
+ VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
+ --suballocItem;
+ m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
- VMA_VALIDATE(!m_Suballocations.empty());
-
- // Expected offset of new suballocation as calculated from previous ones.
- VkDeviceSize calculatedOffset = 0;
- // Expected number of free suballocations as calculated from traversing their list.
- uint32_t calculatedFreeCount = 0;
- // Expected sum size of free suballocations as calculated from traversing their list.
- VkDeviceSize calculatedSumFreeSize = 0;
- // Expected number of free suballocations that should be registered in
- // m_FreeSuballocationsBySize calculated from traversing their list.
- size_t freeSuballocationsToRegister = 0;
- // True if previous visited suballocation was free.
- bool prevFree = false;
-
- for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
- suballocItem != m_Suballocations.cend();
- ++suballocItem)
- {
- const VmaSuballocation& subAlloc = *suballocItem;
-
- // Actual offset of this suballocation doesn't match expected one.
- VMA_VALIDATE(subAlloc.offset == calculatedOffset);
-
- const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
- // Two adjacent free suballocations are invalid. They should be merged.
- VMA_VALIDATE(!prevFree || !currFree);
-
- VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
-
- if(currFree)
- {
- calculatedSumFreeSize += subAlloc.size;
- ++calculatedFreeCount;
- if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- ++freeSuballocationsToRegister;
- }
-
- // Margin required between allocations - every free space must be at least that large.
- VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
- }
- else
- {
- VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
- VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
-
- // Margin required between allocations - previous allocation must be free.
- VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
- }
-
- calculatedOffset += subAlloc.size;
- prevFree = currFree;
- }
-
- // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
- // match expected one.
- VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
-
- VkDeviceSize lastSize = 0;
- for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
- {
- VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
-
- // Only free suballocations can be registered in m_FreeSuballocationsBySize.
- VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
- // They must be sorted by size ascending.
- VMA_VALIDATE(suballocItem->size >= lastSize);
-
- lastSize = suballocItem->size;
- }
-
- // Check if totals match calculacted values.
- VMA_VALIDATE(ValidateFreeSuballocationList());
- VMA_VALIDATE(calculatedOffset == GetSize());
- VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
- VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
-
- return true;
+ VMA_VALIDATE(!m_Suballocations.empty());
+
+ // Expected offset of new suballocation as calculated from previous ones.
+ VkDeviceSize calculatedOffset = 0;
+ // Expected number of free suballocations as calculated from traversing their list.
+ uint32_t calculatedFreeCount = 0;
+ // Expected sum size of free suballocations as calculated from traversing their list.
+ VkDeviceSize calculatedSumFreeSize = 0;
+ // Expected number of free suballocations that should be registered in
+ // m_FreeSuballocationsBySize calculated from traversing their list.
+ size_t freeSuballocationsToRegister = 0;
+ // True if the previously visited suballocation was free.
+ bool prevFree = false;
+
+ for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
+ suballocItem != m_Suballocations.cend();
+ ++suballocItem)
+ {
+ const VmaSuballocation& subAlloc = *suballocItem;
+
+ // Actual offset of this suballocation doesn't match expected one.
+ VMA_VALIDATE(subAlloc.offset == calculatedOffset);
+
+ const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
+ // Two adjacent free suballocations are invalid. They should be merged.
+ VMA_VALIDATE(!prevFree || !currFree);
+
+ VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
+
+ if(currFree)
+ {
+ calculatedSumFreeSize += subAlloc.size;
+ ++calculatedFreeCount;
+ if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ {
+ ++freeSuballocationsToRegister;
+ }
+
+ // Margin required between allocations - every free space must be at least that large.
+ VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
+ }
+ else
+ {
+ VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
+ VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
+
+ // Margin required between allocations - previous allocation must be free.
+ VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
+ }
+
+ calculatedOffset += subAlloc.size;
+ prevFree = currFree;
+ }
+
+ // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
+ // match expected one.
+ VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
+
+ VkDeviceSize lastSize = 0;
+ for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
+ {
+ VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
+
+ // Only free suballocations can be registered in m_FreeSuballocationsBySize.
+ VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
+ // They must be sorted by size ascending.
+ VMA_VALIDATE(suballocItem->size >= lastSize);
+
+ lastSize = suballocItem->size;
+ }
+
+ // Check if totals match calculated values.
+ VMA_VALIDATE(ValidateFreeSuballocationList());
+ VMA_VALIDATE(calculatedOffset == GetSize());
+ VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
+ VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
+
+ return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
- if(!m_FreeSuballocationsBySize.empty())
- {
- return m_FreeSuballocationsBySize.back()->size;
- }
- else
- {
- return 0;
- }
+ if(!m_FreeSuballocationsBySize.empty())
+ {
+ return m_FreeSuballocationsBySize.back()->size;
+ }
+ else
+ {
+ return 0;
+ }
}
bool VmaBlockMetadata_Generic::IsEmpty() const
{
- return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
+ return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
- outInfo.blockCount = 1;
-
- const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
- outInfo.allocationCount = rangeCount - m_FreeCount;
- outInfo.unusedRangeCount = m_FreeCount;
-
- outInfo.unusedBytes = m_SumFreeSize;
- outInfo.usedBytes = GetSize() - outInfo.unusedBytes;
-
- outInfo.allocationSizeMin = UINT64_MAX;
- outInfo.allocationSizeMax = 0;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMax = 0;
-
- for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
- suballocItem != m_Suballocations.cend();
- ++suballocItem)
- {
- const VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
- outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
- }
- else
- {
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
- }
- }
+ outInfo.blockCount = 1;
+
+ const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
+ outInfo.allocationCount = rangeCount - m_FreeCount;
+ outInfo.unusedRangeCount = m_FreeCount;
+
+ outInfo.unusedBytes = m_SumFreeSize;
+ outInfo.usedBytes = GetSize() - outInfo.unusedBytes;
+
+ outInfo.allocationSizeMin = UINT64_MAX;
+ outInfo.allocationSizeMax = 0;
+ outInfo.unusedRangeSizeMin = UINT64_MAX;
+ outInfo.unusedRangeSizeMax = 0;
+
+ for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
+ suballocItem != m_Suballocations.cend();
+ ++suballocItem)
+ {
+ const VmaSuballocation& suballoc = *suballocItem;
+ if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+ outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+ }
+ else
+ {
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
+ }
+ }
}
void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
- const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
+ const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
- inoutStats.size += GetSize();
- inoutStats.unusedSize += m_SumFreeSize;
- inoutStats.allocationCount += rangeCount - m_FreeCount;
- inoutStats.unusedRangeCount += m_FreeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
+ inoutStats.size += GetSize();
+ inoutStats.unusedSize += m_SumFreeSize;
+ inoutStats.allocationCount += rangeCount - m_FreeCount;
+ inoutStats.unusedRangeCount += m_FreeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
- PrintDetailedMap_Begin(json,
- m_SumFreeSize, // unusedBytes
- m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
- m_FreeCount); // unusedRangeCount
-
- size_t i = 0;
- for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
- suballocItem != m_Suballocations.cend();
- ++suballocItem, ++i)
- {
- if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
- }
- else
- {
- PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
- }
- }
-
- PrintDetailedMap_End(json);
+ PrintDetailedMap_Begin(json,
+ m_SumFreeSize, // unusedBytes
+ m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
+ m_FreeCount); // unusedRangeCount
+
+ size_t i = 0;
+ for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
+ suballocItem != m_Suballocations.cend();
+ ++suballocItem, ++i)
+ {
+ if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
+ }
+ else
+ {
+ PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
+ }
+ }
+
+ PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest)
-{
- VMA_ASSERT(allocSize > 0);
- VMA_ASSERT(!upperAddress);
- VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(pAllocationRequest != VMA_NULL);
- VMA_HEAVY_ASSERT(Validate());
-
- pAllocationRequest->type = VmaAllocationRequestType::Normal;
-
- // There is not enough total free space in this block to fullfill the request: Early return.
- if(canMakeOtherLost == false &&
- m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
- {
- return false;
- }
-
- // New algorithm, efficiently searching freeSuballocationsBySize.
- const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
- if(freeSuballocCount > 0)
- {
- if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
- {
- // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
- VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
- m_FreeSuballocationsBySize.data(),
- m_FreeSuballocationsBySize.data() + freeSuballocCount,
- allocSize + 2 * VMA_DEBUG_MARGIN,
- VmaSuballocationItemSizeLess());
- size_t index = it - m_FreeSuballocationsBySize.data();
- for(; index < freeSuballocCount; ++index)
- {
- if(CheckAllocation(
- currentFrameIndex,
- frameInUseCount,
- bufferImageGranularity,
- allocSize,
- allocAlignment,
- allocType,
- m_FreeSuballocationsBySize[index],
- false, // canMakeOtherLost
- &pAllocationRequest->offset,
- &pAllocationRequest->itemsToMakeLostCount,
- &pAllocationRequest->sumFreeSize,
- &pAllocationRequest->sumItemSize))
- {
- pAllocationRequest->item = m_FreeSuballocationsBySize[index];
- return true;
- }
- }
- }
- else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
- {
- for(VmaSuballocationList::iterator it = m_Suballocations.begin();
- it != m_Suballocations.end();
- ++it)
- {
- if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
- currentFrameIndex,
- frameInUseCount,
- bufferImageGranularity,
- allocSize,
- allocAlignment,
- allocType,
- it,
- false, // canMakeOtherLost
- &pAllocationRequest->offset,
- &pAllocationRequest->itemsToMakeLostCount,
- &pAllocationRequest->sumFreeSize,
- &pAllocationRequest->sumItemSize))
- {
- pAllocationRequest->item = it;
- return true;
- }
- }
- }
- else // WORST_FIT, FIRST_FIT
- {
- // Search staring from biggest suballocations.
- for(size_t index = freeSuballocCount; index--; )
- {
- if(CheckAllocation(
- currentFrameIndex,
- frameInUseCount,
- bufferImageGranularity,
- allocSize,
- allocAlignment,
- allocType,
- m_FreeSuballocationsBySize[index],
- false, // canMakeOtherLost
- &pAllocationRequest->offset,
- &pAllocationRequest->itemsToMakeLostCount,
- &pAllocationRequest->sumFreeSize,
- &pAllocationRequest->sumItemSize))
- {
- pAllocationRequest->item = m_FreeSuballocationsBySize[index];
- return true;
- }
- }
- }
- }
-
- if(canMakeOtherLost)
- {
- // Brute-force algorithm. TODO: Come up with something better.
-
- bool found = false;
- VmaAllocationRequest tmpAllocRequest = {};
- tmpAllocRequest.type = VmaAllocationRequestType::Normal;
- for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
- suballocIt != m_Suballocations.end();
- ++suballocIt)
- {
- if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
- suballocIt->hAllocation->CanBecomeLost())
- {
- if(CheckAllocation(
- currentFrameIndex,
- frameInUseCount,
- bufferImageGranularity,
- allocSize,
- allocAlignment,
- allocType,
- suballocIt,
- canMakeOtherLost,
- &tmpAllocRequest.offset,
- &tmpAllocRequest.itemsToMakeLostCount,
- &tmpAllocRequest.sumFreeSize,
- &tmpAllocRequest.sumItemSize))
- {
- if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
- {
- *pAllocationRequest = tmpAllocRequest;
- pAllocationRequest->item = suballocIt;
- break;
- }
- if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
- {
- *pAllocationRequest = tmpAllocRequest;
- pAllocationRequest->item = suballocIt;
- found = true;
- }
- }
- }
- }
-
- return found;
- }
-
- return false;
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ bool upperAddress,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest)
+{
+ VMA_ASSERT(allocSize > 0);
+ VMA_ASSERT(!upperAddress);
+ VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+ VMA_ASSERT(pAllocationRequest != VMA_NULL);
+ VMA_HEAVY_ASSERT(Validate());
+
+ pAllocationRequest->type = VmaAllocationRequestType::Normal;
+
+ // There is not enough total free space in this block to fulfill the request: early return.
+ if(canMakeOtherLost == false &&
+ m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
+ {
+ return false;
+ }
+
+ // New algorithm, efficiently searching freeSuballocationsBySize.
+ const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
+ if(freeSuballocCount > 0)
+ {
+ if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
+ {
+ // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
+ VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
+ m_FreeSuballocationsBySize.data(),
+ m_FreeSuballocationsBySize.data() + freeSuballocCount,
+ allocSize + 2 * VMA_DEBUG_MARGIN,
+ VmaSuballocationItemSizeLess());
+ size_t index = it - m_FreeSuballocationsBySize.data();
+ for(; index < freeSuballocCount; ++index)
+ {
+ if(CheckAllocation(
+ currentFrameIndex,
+ frameInUseCount,
+ bufferImageGranularity,
+ allocSize,
+ allocAlignment,
+ allocType,
+ m_FreeSuballocationsBySize[index],
+ false, // canMakeOtherLost
+ &pAllocationRequest->offset,
+ &pAllocationRequest->itemsToMakeLostCount,
+ &pAllocationRequest->sumFreeSize,
+ &pAllocationRequest->sumItemSize))
+ {
+ pAllocationRequest->item = m_FreeSuballocationsBySize[index];
+ return true;
+ }
+ }
+ }
+ else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
+ {
+ for(VmaSuballocationList::iterator it = m_Suballocations.begin();
+ it != m_Suballocations.end();
+ ++it)
+ {
+ if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
+ currentFrameIndex,
+ frameInUseCount,
+ bufferImageGranularity,
+ allocSize,
+ allocAlignment,
+ allocType,
+ it,
+ false, // canMakeOtherLost
+ &pAllocationRequest->offset,
+ &pAllocationRequest->itemsToMakeLostCount,
+ &pAllocationRequest->sumFreeSize,
+ &pAllocationRequest->sumItemSize))
+ {
+ pAllocationRequest->item = it;
+ return true;
+ }
+ }
+ }
+ else // WORST_FIT, FIRST_FIT
+ {
+ // Search starting from the biggest suballocations.
+ for(size_t index = freeSuballocCount; index--; )
+ {
+ if(CheckAllocation(
+ currentFrameIndex,
+ frameInUseCount,
+ bufferImageGranularity,
+ allocSize,
+ allocAlignment,
+ allocType,
+ m_FreeSuballocationsBySize[index],
+ false, // canMakeOtherLost
+ &pAllocationRequest->offset,
+ &pAllocationRequest->itemsToMakeLostCount,
+ &pAllocationRequest->sumFreeSize,
+ &pAllocationRequest->sumItemSize))
+ {
+ pAllocationRequest->item = m_FreeSuballocationsBySize[index];
+ return true;
+ }
+ }
+ }
+ }
+
+ if(canMakeOtherLost)
+ {
+ // Brute-force algorithm. TODO: Come up with something better.
+
+ bool found = false;
+ VmaAllocationRequest tmpAllocRequest = {};
+ tmpAllocRequest.type = VmaAllocationRequestType::Normal;
+ for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
+ suballocIt != m_Suballocations.end();
+ ++suballocIt)
+ {
+ if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
+ suballocIt->hAllocation->CanBecomeLost())
+ {
+ if(CheckAllocation(
+ currentFrameIndex,
+ frameInUseCount,
+ bufferImageGranularity,
+ allocSize,
+ allocAlignment,
+ allocType,
+ suballocIt,
+ canMakeOtherLost,
+ &tmpAllocRequest.offset,
+ &tmpAllocRequest.itemsToMakeLostCount,
+ &tmpAllocRequest.sumFreeSize,
+ &tmpAllocRequest.sumItemSize))
+ {
+ if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
+ {
+ *pAllocationRequest = tmpAllocRequest;
+ pAllocationRequest->item = suballocIt;
+ break;
+ }
+ if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
+ {
+ *pAllocationRequest = tmpAllocRequest;
+ pAllocationRequest->item = suballocIt;
+ found = true;
+ }
+ }
+ }
+ }
+
+ return found;
+ }
+
+ return false;
}
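Why the early return above tests against allocSize + 2 * VMA_DEBUG_MARGIN: with a nonzero debug margin, every allocation must leave a free gap on both sides, so even the cheapest possible placement consumes the allocation size plus two margins of free space. With hypothetical numbers, VMA_DEBUG_MARGIN = 16 and allocSize = 992 require at least 992 + 2 * 16 = 1024 free bytes in the block.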
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest)
-{
- VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
-
- while(pAllocationRequest->itemsToMakeLostCount > 0)
- {
- if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- ++pAllocationRequest->item;
- }
- VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
- VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
- VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
- if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
- --pAllocationRequest->itemsToMakeLostCount;
- }
- else
- {
- return false;
- }
- }
-
- VMA_HEAVY_ASSERT(Validate());
- VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
- VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
-
- return true;
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest)
+{
+ VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
+
+ while(pAllocationRequest->itemsToMakeLostCount > 0)
+ {
+ if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ ++pAllocationRequest->item;
+ }
+ VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
+ VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
+ VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
+ if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+ {
+ pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
+ --pAllocationRequest->itemsToMakeLostCount;
+ }
+ else
+ {
+ return false;
+ }
+ }
+
+ VMA_HEAVY_ASSERT(Validate());
+ VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
+ VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
- uint32_t lostAllocationCount = 0;
- for(VmaSuballocationList::iterator it = m_Suballocations.begin();
- it != m_Suballocations.end();
- ++it)
- {
- if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
- it->hAllocation->CanBecomeLost() &&
- it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- it = FreeSuballocation(it);
- ++lostAllocationCount;
- }
- }
- return lostAllocationCount;
+ uint32_t lostAllocationCount = 0;
+ for(VmaSuballocationList::iterator it = m_Suballocations.begin();
+ it != m_Suballocations.end();
+ ++it)
+ {
+ if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
+ it->hAllocation->CanBecomeLost() &&
+ it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+ {
+ it = FreeSuballocation(it);
+ ++lostAllocationCount;
+ }
+ }
+ return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
- for(VmaSuballocationList::iterator it = m_Suballocations.begin();
- it != m_Suballocations.end();
- ++it)
- {
- if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- }
- }
-
- return VK_SUCCESS;
+ for(VmaSuballocationList::iterator it = m_Suballocations.begin();
+ it != m_Suballocations.end();
+ ++it)
+ {
+ if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
+ {
+ VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
+ {
+ VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ }
+ }
+
+ return VK_SUCCESS;
}
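CheckCorruption() delegates to VmaValidateMagicValue(), which inspects the VMA_DEBUG_MARGIN bytes before and after each allocation for a fill pattern written at allocation time. A hedged sketch of such a check (the names, pattern value, and 4-byte granularity are assumptions):

    #include <cstddef>
    #include <cstdint>

    static const uint32_t MAGIC = 0x7F84E666u; // Fill pattern; assumed value.

    // Returns false if any 32-bit word in [offset, offset + marginBytes) was overwritten.
    bool ValidateMagic(const void* pBlockData, size_t offset, size_t marginBytes)
    {
        const uint32_t* p = reinterpret_cast<const uint32_t*>(
            static_cast<const char*>(pBlockData) + offset);
        for(size_t i = 0; i < marginBytes / sizeof(uint32_t); ++i, ++p)
        {
            if(*p != MAGIC)
                return false; // A neighbor wrote past its bounds.
        }
        return true;
    }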
void VmaBlockMetadata_Generic::Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation)
-{
- VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
- VMA_ASSERT(request.item != m_Suballocations.end());
- VmaSuballocation& suballoc = *request.item;
- // Given suballocation is a free block.
- VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
- // Given offset is inside this suballocation.
- VMA_ASSERT(request.offset >= suballoc.offset);
- const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
- VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
- const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
-
- // Unregister this free suballocation from m_FreeSuballocationsBySize and update
- // it to become used.
- UnregisterFreeSuballocation(request.item);
-
- suballoc.offset = request.offset;
- suballoc.size = allocSize;
- suballoc.type = type;
- suballoc.hAllocation = hAllocation;
-
- // If there are any free bytes remaining at the end, insert new free suballocation after current one.
- if(paddingEnd)
- {
- VmaSuballocation paddingSuballoc = {};
- paddingSuballoc.offset = request.offset + allocSize;
- paddingSuballoc.size = paddingEnd;
- paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- VmaSuballocationList::iterator next = request.item;
- ++next;
- const VmaSuballocationList::iterator paddingEndItem =
- m_Suballocations.insert(next, paddingSuballoc);
- RegisterFreeSuballocation(paddingEndItem);
- }
-
- // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
- if(paddingBegin)
- {
- VmaSuballocation paddingSuballoc = {};
- paddingSuballoc.offset = request.offset - paddingBegin;
- paddingSuballoc.size = paddingBegin;
- paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- const VmaSuballocationList::iterator paddingBeginItem =
- m_Suballocations.insert(request.item, paddingSuballoc);
- RegisterFreeSuballocation(paddingBeginItem);
- }
-
- // Update totals.
- m_FreeCount = m_FreeCount - 1;
- if(paddingBegin > 0)
- {
- ++m_FreeCount;
- }
- if(paddingEnd > 0)
- {
- ++m_FreeCount;
- }
- m_SumFreeSize -= allocSize;
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation)
+{
+ VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
+ VMA_ASSERT(request.item != m_Suballocations.end());
+ VmaSuballocation& suballoc = *request.item;
+ // Given suballocation is a free block.
+ VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+ // Given offset is inside this suballocation.
+ VMA_ASSERT(request.offset >= suballoc.offset);
+ const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
+ VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
+ const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
+
+ // Unregister this free suballocation from m_FreeSuballocationsBySize and update
+ // it to become used.
+ UnregisterFreeSuballocation(request.item);
+
+ suballoc.offset = request.offset;
+ suballoc.size = allocSize;
+ suballoc.type = type;
+ suballoc.hAllocation = hAllocation;
+
+ // If there are any free bytes remaining at the end, insert new free suballocation after current one.
+ if(paddingEnd)
+ {
+ VmaSuballocation paddingSuballoc = {};
+ paddingSuballoc.offset = request.offset + allocSize;
+ paddingSuballoc.size = paddingEnd;
+ paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ VmaSuballocationList::iterator next = request.item;
+ ++next;
+ const VmaSuballocationList::iterator paddingEndItem =
+ m_Suballocations.insert(next, paddingSuballoc);
+ RegisterFreeSuballocation(paddingEndItem);
+ }
+
+ // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
+ if(paddingBegin)
+ {
+ VmaSuballocation paddingSuballoc = {};
+ paddingSuballoc.offset = request.offset - paddingBegin;
+ paddingSuballoc.size = paddingBegin;
+ paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ const VmaSuballocationList::iterator paddingBeginItem =
+ m_Suballocations.insert(request.item, paddingSuballoc);
+ RegisterFreeSuballocation(paddingBeginItem);
+ }
+
+ // Update totals.
+ m_FreeCount = m_FreeCount - 1;
+ if(paddingBegin > 0)
+ {
+ ++m_FreeCount;
+ }
+ if(paddingEnd > 0)
+ {
+ ++m_FreeCount;
+ }
+ m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
- for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
- suballocItem != m_Suballocations.end();
- ++suballocItem)
- {
- VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.hAllocation == allocation)
- {
- FreeSuballocation(suballocItem);
- VMA_HEAVY_ASSERT(Validate());
- return;
- }
- }
- VMA_ASSERT(0 && "Not found!");
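+ // Linear scan over the suballocation list for the entry owning this allocation.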
+ for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
+ suballocItem != m_Suballocations.end();
+ ++suballocItem)
+ {
+ VmaSuballocation& suballoc = *suballocItem;
+ if(suballoc.hAllocation == allocation)
+ {
+ FreeSuballocation(suballocItem);
+ VMA_HEAVY_ASSERT(Validate());
+ return;
+ }
+ }
+ VMA_ASSERT(0 && "Not found!");
}
void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
- for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
- suballocItem != m_Suballocations.end();
- ++suballocItem)
- {
- VmaSuballocation& suballoc = *suballocItem;
- if(suballoc.offset == offset)
- {
- FreeSuballocation(suballocItem);
- return;
- }
- }
- VMA_ASSERT(0 && "Not found!");
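+ // Same linear scan as Free(), keyed by offset instead of allocation handle.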
+ for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
+ suballocItem != m_Suballocations.end();
+ ++suballocItem)
+ {
+ VmaSuballocation& suballoc = *suballocItem;
+ if(suballoc.offset == offset)
+ {
+ FreeSuballocation(suballocItem);
+ return;
+ }
+ }
+ VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
- VkDeviceSize lastSize = 0;
- for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
- {
- const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
+ VkDeviceSize lastSize = 0;
+ for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
+ {
+ const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
- VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
- VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
- VMA_VALIDATE(it->size >= lastSize);
- lastSize = it->size;
- }
- return true;
+ VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
+ VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
+ VMA_VALIDATE(it->size >= lastSize);
+ lastSize = it->size;
+ }
+ return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- VmaSuballocationList::const_iterator suballocItem,
- bool canMakeOtherLost,
- VkDeviceSize* pOffset,
- size_t* itemsToMakeLostCount,
- VkDeviceSize* pSumFreeSize,
- VkDeviceSize* pSumItemSize) const
-{
- VMA_ASSERT(allocSize > 0);
- VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(suballocItem != m_Suballocations.cend());
- VMA_ASSERT(pOffset != VMA_NULL);
-
- *itemsToMakeLostCount = 0;
- *pSumFreeSize = 0;
- *pSumItemSize = 0;
-
- if(canMakeOtherLost)
- {
- if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- *pSumFreeSize = suballocItem->size;
- }
- else
- {
- if(suballocItem->hAllocation->CanBecomeLost() &&
- suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++*itemsToMakeLostCount;
- *pSumItemSize = suballocItem->size;
- }
- else
- {
- return false;
- }
- }
-
- // Remaining size is too small for this request: Early return.
- if(GetSize() - suballocItem->offset < allocSize)
- {
- return false;
- }
-
- // Start from offset equal to beginning of this suballocation.
- *pOffset = suballocItem->offset;
-
- // Apply VMA_DEBUG_MARGIN at the beginning.
- if(VMA_DEBUG_MARGIN > 0)
- {
- *pOffset += VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- *pOffset = VmaAlignUp(*pOffset, allocAlignment);
-
- // Check previous suballocations for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1)
- {
- bool bufferImageGranularityConflict = false;
- VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
- while(prevSuballocItem != m_Suballocations.cbegin())
- {
- --prevSuballocItem;
- const VmaSuballocation& prevSuballoc = *prevSuballocItem;
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- {
- // Already on previous page.
- break;
- }
- }
- if(bufferImageGranularityConflict)
- {
- *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
- }
- }
-
- // Now that we have the final *pOffset, check if we are past suballocItem.
- // If so, return false - this function should be called with another suballocItem as the starting point.
- if(*pOffset >= suballocItem->offset + suballocItem->size)
- {
- return false;
- }
-
- // Calculate padding at the beginning based on current offset.
- const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
-
- // Calculate required margin at the end.
- const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
-
- const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
- // Another early return check.
- if(suballocItem->offset + totalSize > GetSize())
- {
- return false;
- }
-
- // Advance lastSuballocItem until desired size is reached.
- // Update itemsToMakeLostCount.
- VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
- if(totalSize > suballocItem->size)
- {
- VkDeviceSize remainingSize = totalSize - suballocItem->size;
- while(remainingSize > 0)
- {
- ++lastSuballocItem;
- if(lastSuballocItem == m_Suballocations.cend())
- {
- return false;
- }
- if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- *pSumFreeSize += lastSuballocItem->size;
- }
- else
- {
- VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
- if(lastSuballocItem->hAllocation->CanBecomeLost() &&
- lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++*itemsToMakeLostCount;
- *pSumItemSize += lastSuballocItem->size;
- }
- else
- {
- return false;
- }
- }
- remainingSize = (lastSuballocItem->size < remainingSize) ?
- remainingSize - lastSuballocItem->size : 0;
- }
- }
-
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, we must mark more allocations lost or fail.
- if(bufferImageGranularity > 1)
- {
- VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
- ++nextSuballocItem;
- while(nextSuballocItem != m_Suballocations.cend())
- {
- const VmaSuballocation& nextSuballoc = *nextSuballocItem;
- if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
- {
- VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
- if(nextSuballoc.hAllocation->CanBecomeLost() &&
- nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++*itemsToMakeLostCount;
- }
- else
- {
- return false;
- }
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- ++nextSuballocItem;
- }
- }
- }
- else
- {
- const VmaSuballocation& suballoc = *suballocItem;
- VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- *pSumFreeSize = suballoc.size;
-
- // Size of this suballocation is too small for this request: Early return.
- if(suballoc.size < allocSize)
- {
- return false;
- }
-
- // Start from offset equal to beginning of this suballocation.
- *pOffset = suballoc.offset;
-
- // Apply VMA_DEBUG_MARGIN at the beginning.
- if(VMA_DEBUG_MARGIN > 0)
- {
- *pOffset += VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- *pOffset = VmaAlignUp(*pOffset, allocAlignment);
-
- // Check previous suballocations for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1)
- {
- bool bufferImageGranularityConflict = false;
- VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
- while(prevSuballocItem != m_Suballocations.cbegin())
- {
- --prevSuballocItem;
- const VmaSuballocation& prevSuballoc = *prevSuballocItem;
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- {
- // Already on previous page.
- break;
- }
- }
- if(bufferImageGranularityConflict)
- {
- *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
- }
- }
-
- // Calculate padding at the beginning based on current offset.
- const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
-
- // Calculate required margin at the end.
- const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
-
- // Fail if requested size plus margin before and after is bigger than size of this suballocation.
- if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
- {
- return false;
- }
-
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, allocation cannot be made here.
- if(bufferImageGranularity > 1)
- {
- VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
- ++nextSuballocItem;
- while(nextSuballocItem != m_Suballocations.cend())
- {
- const VmaSuballocation& nextSuballoc = *nextSuballocItem;
- if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
- {
- return false;
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- ++nextSuballocItem;
- }
- }
- }
-
- // All tests passed: Success. pOffset is already filled.
- return true;
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ VmaSuballocationList::const_iterator suballocItem,
+ bool canMakeOtherLost,
+ VkDeviceSize* pOffset,
+ size_t* itemsToMakeLostCount,
+ VkDeviceSize* pSumFreeSize,
+ VkDeviceSize* pSumItemSize) const
+{
+ VMA_ASSERT(allocSize > 0);
+ VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+ VMA_ASSERT(suballocItem != m_Suballocations.cend());
+ VMA_ASSERT(pOffset != VMA_NULL);
+
+ *itemsToMakeLostCount = 0;
+ *pSumFreeSize = 0;
+ *pSumItemSize = 0;
+
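+ // Two paths follow: with canMakeOtherLost, walk forward from suballocItem,
+ // accumulating free space and lost-candidate allocations until the request fits;
+ // without it, the request must fit entirely inside this one free suballocation.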
+ if(canMakeOtherLost)
+ {
+ if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ *pSumFreeSize = suballocItem->size;
+ }
+ else
+ {
+ if(suballocItem->hAllocation->CanBecomeLost() &&
+ suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+ {
+ ++*itemsToMakeLostCount;
+ *pSumItemSize = suballocItem->size;
+ }
+ else
+ {
+ return false;
+ }
+ }
+
+ // Remaining size is too small for this request: Early return.
+ if(GetSize() - suballocItem->offset < allocSize)
+ {
+ return false;
+ }
+
+ // Start from offset equal to beginning of this suballocation.
+ *pOffset = suballocItem->offset;
+
+ // Apply VMA_DEBUG_MARGIN at the beginning.
+ if(VMA_DEBUG_MARGIN > 0)
+ {
+ *pOffset += VMA_DEBUG_MARGIN;
+ }
+
+ // Apply alignment.
+ *pOffset = VmaAlignUp(*pOffset, allocAlignment);
+
+ // Check previous suballocations for BufferImageGranularity conflicts.
+ // Make bigger alignment if necessary.
+ if(bufferImageGranularity > 1)
+ {
+ bool bufferImageGranularityConflict = false;
+ VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
+ while(prevSuballocItem != m_Suballocations.cbegin())
+ {
+ --prevSuballocItem;
+ const VmaSuballocation& prevSuballoc = *prevSuballocItem;
+ if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+ {
+ bufferImageGranularityConflict = true;
+ break;
+ }
+ }
+ else
+ {
+ // Already on previous page.
+ break;
+ }
+ }
+ if(bufferImageGranularityConflict)
+ {
+ *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
+ }
+ }
+
+ // Now that we have the final *pOffset, check if we are past suballocItem.
+ // If so, return false - this function should be called with another suballocItem as the starting point.
+ if(*pOffset >= suballocItem->offset + suballocItem->size)
+ {
+ return false;
+ }
+
+ // Calculate padding at the beginning based on current offset.
+ const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
+
+ // Calculate required margin at the end.
+ const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
+
+ const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
+ // Another early return check.
+ if(suballocItem->offset + totalSize > GetSize())
+ {
+ return false;
+ }
+
+ // Advance lastSuballocItem until desired size is reached.
+ // Update itemsToMakeLostCount.
+ VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
+ if(totalSize > suballocItem->size)
+ {
+ VkDeviceSize remainingSize = totalSize - suballocItem->size;
+ while(remainingSize > 0)
+ {
+ ++lastSuballocItem;
+ if(lastSuballocItem == m_Suballocations.cend())
+ {
+ return false;
+ }
+ if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ *pSumFreeSize += lastSuballocItem->size;
+ }
+ else
+ {
+ VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
+ if(lastSuballocItem->hAllocation->CanBecomeLost() &&
+ lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+ {
+ ++*itemsToMakeLostCount;
+ *pSumItemSize += lastSuballocItem->size;
+ }
+ else
+ {
+ return false;
+ }
+ }
+ remainingSize = (lastSuballocItem->size < remainingSize) ?
+ remainingSize - lastSuballocItem->size : 0;
+ }
+ }
+
+ // Check next suballocations for BufferImageGranularity conflicts.
+ // If conflict exists, we must mark more allocations lost or fail.
+ if(bufferImageGranularity > 1)
+ {
+ VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
+ ++nextSuballocItem;
+ while(nextSuballocItem != m_Suballocations.cend())
+ {
+ const VmaSuballocation& nextSuballoc = *nextSuballocItem;
+ if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+ {
+ VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
+ if(nextSuballoc.hAllocation->CanBecomeLost() &&
+ nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+ {
+ ++*itemsToMakeLostCount;
+ }
+ else
+ {
+ return false;
+ }
+ }
+ }
+ else
+ {
+ // Already on next page.
+ break;
+ }
+ ++nextSuballocItem;
+ }
+ }
+ }
+ else
+ {
+ const VmaSuballocation& suballoc = *suballocItem;
+ VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ *pSumFreeSize = suballoc.size;
+
+ // Size of this suballocation is too small for this request: Early return.
+ if(suballoc.size < allocSize)
+ {
+ return false;
+ }
+
+ // Start from offset equal to beginning of this suballocation.
+ *pOffset = suballoc.offset;
+
+ // Apply VMA_DEBUG_MARGIN at the beginning.
+ if(VMA_DEBUG_MARGIN > 0)
+ {
+ *pOffset += VMA_DEBUG_MARGIN;
+ }
+
+ // Apply alignment.
+ *pOffset = VmaAlignUp(*pOffset, allocAlignment);
+
+ // Check previous suballocations for BufferImageGranularity conflicts.
+ // Make bigger alignment if necessary.
+ if(bufferImageGranularity > 1)
+ {
+ bool bufferImageGranularityConflict = false;
+ VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
+ while(prevSuballocItem != m_Suballocations.cbegin())
+ {
+ --prevSuballocItem;
+ const VmaSuballocation& prevSuballoc = *prevSuballocItem;
+ if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+ {
+ bufferImageGranularityConflict = true;
+ break;
+ }
+ }
+ else
+ {
+ // Already on previous page.
+ break;
+ }
+ }
+ if(bufferImageGranularityConflict)
+ {
+ *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
+ }
+ }
+
+ // Calculate padding at the beginning based on current offset.
+ const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
+
+ // Calculate required margin at the end.
+ const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
+
+ // Fail if requested size plus margin before and after is bigger than size of this suballocation.
+ if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
+ {
+ return false;
+ }
+
+ // Check next suballocations for BufferImageGranularity conflicts.
+ // If conflict exists, allocation cannot be made here.
+ if(bufferImageGranularity > 1)
+ {
+ VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
+ ++nextSuballocItem;
+ while(nextSuballocItem != m_Suballocations.cend())
+ {
+ const VmaSuballocation& nextSuballoc = *nextSuballocItem;
+ if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+ {
+ return false;
+ }
+ }
+ else
+ {
+ // Already on next page.
+ break;
+ }
+ ++nextSuballocItem;
+ }
+ }
+ }
+
+ // All tests passed: Success. pOffset is already filled.
+ return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
- VMA_ASSERT(item != m_Suballocations.end());
- VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VmaSuballocationList::iterator nextItem = item;
- ++nextItem;
- VMA_ASSERT(nextItem != m_Suballocations.end());
- VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
+ VMA_ASSERT(item != m_Suballocations.end());
+ VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ VmaSuballocationList::iterator nextItem = item;
+ ++nextItem;
+ VMA_ASSERT(nextItem != m_Suballocations.end());
+ VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
- item->size += nextItem->size;
- --m_FreeCount;
- m_Suballocations.erase(nextItem);
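+ // Absorb the following free suballocation into this one and erase it.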
+ item->size += nextItem->size;
+ --m_FreeCount;
+ m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
- // Change this suballocation to be marked as free.
- VmaSuballocation& suballoc = *suballocItem;
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
-
- // Update totals.
- ++m_FreeCount;
- m_SumFreeSize += suballoc.size;
-
- // Merge with previous and/or next suballocation if it's also free.
- bool mergeWithNext = false;
- bool mergeWithPrev = false;
-
- VmaSuballocationList::iterator nextItem = suballocItem;
- ++nextItem;
- if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
- {
- mergeWithNext = true;
- }
-
- VmaSuballocationList::iterator prevItem = suballocItem;
- if(suballocItem != m_Suballocations.begin())
- {
- --prevItem;
- if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- mergeWithPrev = true;
- }
- }
-
- if(mergeWithNext)
- {
- UnregisterFreeSuballocation(nextItem);
- MergeFreeWithNext(suballocItem);
- }
-
- if(mergeWithPrev)
- {
- UnregisterFreeSuballocation(prevItem);
- MergeFreeWithNext(prevItem);
- RegisterFreeSuballocation(prevItem);
- return prevItem;
- }
- else
- {
- RegisterFreeSuballocation(suballocItem);
- return suballocItem;
- }
+ // Change this suballocation to be marked as free.
+ VmaSuballocation& suballoc = *suballocItem;
+ suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ suballoc.hAllocation = VK_NULL_HANDLE;
+
+ // Update totals.
+ ++m_FreeCount;
+ m_SumFreeSize += suballoc.size;
+
+ // Merge with previous and/or next suballocation if it's also free.
+ bool mergeWithNext = false;
+ bool mergeWithPrev = false;
+
+ VmaSuballocationList::iterator nextItem = suballocItem;
+ ++nextItem;
+ if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
+ {
+ mergeWithNext = true;
+ }
+
+ VmaSuballocationList::iterator prevItem = suballocItem;
+ if(suballocItem != m_Suballocations.begin())
+ {
+ --prevItem;
+ if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ mergeWithPrev = true;
+ }
+ }
+
+ if(mergeWithNext)
+ {
+ UnregisterFreeSuballocation(nextItem);
+ MergeFreeWithNext(suballocItem);
+ }
+
+ if(mergeWithPrev)
+ {
+ UnregisterFreeSuballocation(prevItem);
+ MergeFreeWithNext(prevItem);
+ RegisterFreeSuballocation(prevItem);
+ return prevItem;
+ }
+ else
+ {
+ RegisterFreeSuballocation(suballocItem);
+ return suballocItem;
+ }
}
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
- VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(item->size > 0);
+ VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+ VMA_ASSERT(item->size > 0);
- // You may want to enable this validation at the beginning or at the end of
- // this function, depending on what you want to check.
- VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+ // You may want to enable this validation at the beginning or at the end of
+ // this function, depending on what you want to check.
+ VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
- if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- if(m_FreeSuballocationsBySize.empty())
- {
- m_FreeSuballocationsBySize.push_back(item);
- }
- else
- {
- VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
- }
- }
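+ // Only free ranges of at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes
+ // are tracked in m_FreeSuballocationsBySize, which is kept sorted by size.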
+ if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ {
+ if(m_FreeSuballocationsBySize.empty())
+ {
+ m_FreeSuballocationsBySize.push_back(item);
+ }
+ else
+ {
+ VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
+ }
+ }
- //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+ //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
- VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(item->size > 0);
-
- // You may want to enable this validation at the beginning or at the end of
- // this function, depending on what you want to check.
- VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
-
- if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
- m_FreeSuballocationsBySize.data(),
- m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
- item,
- VmaSuballocationItemSizeLess());
- for(size_t index = it - m_FreeSuballocationsBySize.data();
- index < m_FreeSuballocationsBySize.size();
- ++index)
- {
- if(m_FreeSuballocationsBySize[index] == item)
- {
- VmaVectorRemove(m_FreeSuballocationsBySize, index);
- return;
- }
- VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
- }
- VMA_ASSERT(0 && "Not found.");
- }
-
- //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+ VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
+ VMA_ASSERT(item->size > 0);
+
+ // You may want to enable this validation at the beginning or at the end of
+ // this function, depending on what you want to check.
+ VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
+
+ if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ {
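+ // Binary-search the first entry of equal size, then scan forward through the
+ // run of equal-sized entries to find this exact iterator.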
+ VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
+ m_FreeSuballocationsBySize.data(),
+ m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
+ item,
+ VmaSuballocationItemSizeLess());
+ for(size_t index = it - m_FreeSuballocationsBySize.data();
+ index < m_FreeSuballocationsBySize.size();
+ ++index)
+ {
+ if(m_FreeSuballocationsBySize[index] == item)
+ {
+ VmaVectorRemove(m_FreeSuballocationsBySize, index);
+ return;
+ }
+ VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
+ }
+ VMA_ASSERT(0 && "Not found.");
+ }
+
+ //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
- VkDeviceSize bufferImageGranularity,
- VmaSuballocationType& inOutPrevSuballocType) const
-{
- if(bufferImageGranularity == 1 || IsEmpty())
- {
- return false;
- }
-
- VkDeviceSize minAlignment = VK_WHOLE_SIZE;
- bool typeConflictFound = false;
- for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
- it != m_Suballocations.cend();
- ++it)
- {
- const VmaSuballocationType suballocType = it->type;
- if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
- {
- minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
- if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
- {
- typeConflictFound = true;
- }
- inOutPrevSuballocType = suballocType;
- }
- }
-
- return typeConflictFound || minAlignment >= bufferImageGranularity;
+ VkDeviceSize bufferImageGranularity,
+ VmaSuballocationType& inOutPrevSuballocType) const
+{
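+ // Scans all used suballocations, tracking the smallest allocation alignment and
+ // whether any two consecutive allocation types conflict under granularity rules.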
+ if(bufferImageGranularity == 1 || IsEmpty())
+ {
+ return false;
+ }
+
+ VkDeviceSize minAlignment = VK_WHOLE_SIZE;
+ bool typeConflictFound = false;
+ for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
+ it != m_Suballocations.cend();
+ ++it)
+ {
+ const VmaSuballocationType suballocType = it->type;
+ if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
+ if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
+ {
+ typeConflictFound = true;
+ }
+ inOutPrevSuballocType = suballocType;
+ }
+ }
+
+ return typeConflictFound || minAlignment >= bufferImageGranularity;
}
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
- VmaBlockMetadata(hAllocator),
- m_SumFreeSize(0),
- m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
- m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
- m_1stVectorIndex(0),
- m_2ndVectorMode(SECOND_VECTOR_EMPTY),
- m_1stNullItemsBeginCount(0),
- m_1stNullItemsMiddleCount(0),
- m_2ndNullItemsCount(0)
+ VmaBlockMetadata(hAllocator),
+ m_SumFreeSize(0),
+ m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+ m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
+ m_1stVectorIndex(0),
+ m_2ndVectorMode(SECOND_VECTOR_EMPTY),
+ m_1stNullItemsBeginCount(0),
+ m_1stNullItemsMiddleCount(0),
+ m_2ndNullItemsCount(0)
{
}
@@ -9328,1728 +9166,1728 @@ VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
- VmaBlockMetadata::Init(size);
- m_SumFreeSize = size;
+ VmaBlockMetadata::Init(size);
+ m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
- VMA_VALIDATE(!suballocations1st.empty() ||
- suballocations2nd.empty() ||
- m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
-
- if(!suballocations1st.empty())
- {
- // Null item at the beginning should be accounted for in m_1stNullItemsBeginCount.
- VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
- // Null item at the end should be just pop_back().
- VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
- }
- if(!suballocations2nd.empty())
- {
- // Null item at the end should be just pop_back().
- VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
- }
-
- VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
- VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
-
- VkDeviceSize sumUsedSize = 0;
- const size_t suballoc1stCount = suballocations1st.size();
- VkDeviceSize offset = VMA_DEBUG_MARGIN;
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const size_t suballoc2ndCount = suballocations2nd.size();
- size_t nullItem2ndCount = 0;
- for(size_t i = 0; i < suballoc2ndCount; ++i)
- {
- const VmaSuballocation& suballoc = suballocations2nd[i];
- const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
- VMA_VALIDATE(suballoc.offset >= offset);
-
- if(!currFree)
- {
- VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
- VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
- sumUsedSize += suballoc.size;
- }
- else
- {
- ++nullItem2ndCount;
- }
-
- offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
- }
-
- VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
- }
-
- for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
- {
- const VmaSuballocation& suballoc = suballocations1st[i];
- VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
- suballoc.hAllocation == VK_NULL_HANDLE);
- }
-
- size_t nullItem1stCount = m_1stNullItemsBeginCount;
-
- for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
- {
- const VmaSuballocation& suballoc = suballocations1st[i];
- const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
- VMA_VALIDATE(suballoc.offset >= offset);
- VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
-
- if(!currFree)
- {
- VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
- VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
- sumUsedSize += suballoc.size;
- }
- else
- {
- ++nullItem1stCount;
- }
-
- offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
- }
- VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- const size_t suballoc2ndCount = suballocations2nd.size();
- size_t nullItem2ndCount = 0;
- for(size_t i = suballoc2ndCount; i--; )
- {
- const VmaSuballocation& suballoc = suballocations2nd[i];
- const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
-
- VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
- VMA_VALIDATE(suballoc.offset >= offset);
-
- if(!currFree)
- {
- VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
- VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
- sumUsedSize += suballoc.size;
- }
- else
- {
- ++nullItem2ndCount;
- }
-
- offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
- }
-
- VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
- }
-
- VMA_VALIDATE(offset <= GetSize());
- VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
-
- return true;
+ const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
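+ // Walks all suballocations in increasing offset order (2nd vector in ring-buffer
+ // mode, then the 1st vector, then the 2nd vector in double-stack mode), checking
+ // that offsets increase and that the cached null-item counts match reality.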
+ VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
+ VMA_VALIDATE(!suballocations1st.empty() ||
+ suballocations2nd.empty() ||
+ m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
+
+ if(!suballocations1st.empty())
+ {
+ // Null item at the beginning should be accounted for in m_1stNullItemsBeginCount.
+ VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
+ // Null item at the end should be just pop_back().
+ VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
+ }
+ if(!suballocations2nd.empty())
+ {
+ // Null item at the end should be just pop_back().
+ VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
+ }
+
+ VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
+ VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
+
+ VkDeviceSize sumUsedSize = 0;
+ const size_t suballoc1stCount = suballocations1st.size();
+ VkDeviceSize offset = VMA_DEBUG_MARGIN;
+
+ if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ const size_t suballoc2ndCount = suballocations2nd.size();
+ size_t nullItem2ndCount = 0;
+ for(size_t i = 0; i < suballoc2ndCount; ++i)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[i];
+ const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+ VMA_VALIDATE(suballoc.offset >= offset);
+
+ if(!currFree)
+ {
+ VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+ VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+ sumUsedSize += suballoc.size;
+ }
+ else
+ {
+ ++nullItem2ndCount;
+ }
+
+ offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+ }
+
+ VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
+ }
+
+ for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
+ {
+ const VmaSuballocation& suballoc = suballocations1st[i];
+ VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
+ suballoc.hAllocation == VK_NULL_HANDLE);
+ }
+
+ size_t nullItem1stCount = m_1stNullItemsBeginCount;
+
+ for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
+ {
+ const VmaSuballocation& suballoc = suballocations1st[i];
+ const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+ VMA_VALIDATE(suballoc.offset >= offset);
+ VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
+
+ if(!currFree)
+ {
+ VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+ VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+ sumUsedSize += suballoc.size;
+ }
+ else
+ {
+ ++nullItem1stCount;
+ }
+
+ offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+ }
+ VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
+
+ if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+ {
+ const size_t suballoc2ndCount = suballocations2nd.size();
+ size_t nullItem2ndCount = 0;
+ for(size_t i = suballoc2ndCount; i--; )
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[i];
+ const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
+
+ VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
+ VMA_VALIDATE(suballoc.offset >= offset);
+
+ if(!currFree)
+ {
+ VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
+ VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
+ sumUsedSize += suballoc.size;
+ }
+ else
+ {
+ ++nullItem2ndCount;
+ }
+
+ offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
+ }
+
+ VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
+ }
+
+ VMA_VALIDATE(offset <= GetSize());
+ VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
+
+ return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
- return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
- AccessSuballocations2nd().size() - m_2ndNullItemsCount;
+ return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
+ AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
- const VkDeviceSize size = GetSize();
-
- /*
- We don't consider gaps inside allocation vectors with freed allocations because
- they are not suitable for reuse in linear allocator. We consider only space that
- is available for new allocations.
- */
- if(IsEmpty())
- {
- return size;
- }
-
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-
- switch(m_2ndVectorMode)
- {
- case SECOND_VECTOR_EMPTY:
- /*
- Available space is after end of 1st, as well as before beginning of 1st (which
- would make it a ring buffer).
- */
- {
- const size_t suballocations1stCount = suballocations1st.size();
- VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
- const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
- const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
- return VMA_MAX(
- firstSuballoc.offset,
- size - (lastSuballoc.offset + lastSuballoc.size));
- }
- break;
-
- case SECOND_VECTOR_RING_BUFFER:
- /*
- Available space is only between end of 2nd and beginning of 1st.
- */
- {
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
- const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
- return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
- }
- break;
-
- case SECOND_VECTOR_DOUBLE_STACK:
- /*
- Available space is only between end of 1st and top of 2nd.
- */
- {
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
- const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
- return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
- }
- break;
-
- default:
- VMA_ASSERT(0);
- return 0;
- }
+ const VkDeviceSize size = GetSize();
+
+ /*
+ We don't consider gaps inside allocation vectors with freed allocations because
+ they are not suitable for reuse in linear allocator. We consider only space that
+ is available for new allocations.
+ */
+ if(IsEmpty())
+ {
+ return size;
+ }
+
+ const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+
+ switch(m_2ndVectorMode)
+ {
+ case SECOND_VECTOR_EMPTY:
+ /*
+ Available space is after end of 1st, as well as before beginning of 1st (which
+ would make it a ring buffer).
+ */
+ {
+ const size_t suballocations1stCount = suballocations1st.size();
+ VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
+ const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
+ const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
+ return VMA_MAX(
+ firstSuballoc.offset,
+ size - (lastSuballoc.offset + lastSuballoc.size));
+ }
+ break;
+
+ case SECOND_VECTOR_RING_BUFFER:
+ /*
+ Available space is only between end of 2nd and beginning of 1st.
+ */
+ {
+ const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+ const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
+ const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
+ return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
+ }
+ break;
+
+ case SECOND_VECTOR_DOUBLE_STACK:
+ /*
+ Available space is only between end of 1st and top of 2nd.
+ */
+ {
+ const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+ const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
+ const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
+ return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
+ }
+ break;
+
+ default:
+ VMA_ASSERT(0);
+ return 0;
+ }
}
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
- const VkDeviceSize size = GetSize();
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const size_t suballoc1stCount = suballocations1st.size();
- const size_t suballoc2ndCount = suballocations2nd.size();
-
- outInfo.blockCount = 1;
- outInfo.allocationCount = (uint32_t)GetAllocationCount();
- outInfo.unusedRangeCount = 0;
- outInfo.usedBytes = 0;
- outInfo.allocationSizeMin = UINT64_MAX;
- outInfo.allocationSizeMax = 0;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMax = 0;
-
- VkDeviceSize lastOffset = 0;
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
- size_t nextAlloc2ndIndex = 0;
- while(lastOffset < freeSpace2ndTo1stEnd)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex < suballoc2ndCount &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex < suballoc2ndCount)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- outInfo.usedBytes += suballoc.size;
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
- outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- // There is free space from lastOffset to freeSpace2ndTo1stEnd.
- if(lastOffset < freeSpace2ndTo1stEnd)
- {
- const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace2ndTo1stEnd;
- }
- }
- }
-
- size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
- const VkDeviceSize freeSpace1stTo2ndEnd =
- m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
- while(lastOffset < freeSpace1stTo2ndEnd)
- {
- // Find next non-null allocation or move nextAlloc1stIndex to the end.
- while(nextAlloc1stIndex < suballoc1stCount &&
- suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc1stIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc1stIndex < suballoc1stCount)
- {
- const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- outInfo.usedBytes += suballoc.size;
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
- outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc1stIndex;
- }
- // We are at the end.
- else
- {
- // There is free space from lastOffset to freeSpace1stTo2ndEnd.
- if(lastOffset < freeSpace1stTo2ndEnd)
- {
- const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace1stTo2ndEnd;
- }
- }
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
- while(lastOffset < size)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex != SIZE_MAX &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- --nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex != SIZE_MAX)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- outInfo.usedBytes += suballoc.size;
- outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
- outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- --nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- // There is free space from lastOffset to size.
- if(lastOffset < size)
- {
- const VkDeviceSize unusedRangeSize = size - lastOffset;
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = size;
- }
- }
- }
-
- outInfo.unusedBytes = size - outInfo.usedBytes;
+ const VkDeviceSize size = GetSize();
+ const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+ const size_t suballoc1stCount = suballocations1st.size();
+ const size_t suballoc2ndCount = suballocations2nd.size();
+
+ outInfo.blockCount = 1;
+ outInfo.allocationCount = (uint32_t)GetAllocationCount();
+ outInfo.unusedRangeCount = 0;
+ outInfo.usedBytes = 0;
+ outInfo.allocationSizeMin = UINT64_MAX;
+ outInfo.allocationSizeMax = 0;
+ outInfo.unusedRangeSizeMin = UINT64_MAX;
+ outInfo.unusedRangeSizeMax = 0;
+
+ VkDeviceSize lastOffset = 0;
+
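+ // Sweep the block in increasing offset order: the ring-buffer part of the 2nd
+ // vector first, then the 1st vector, then (in double-stack mode) the 2nd vector,
+ // recording every used range and every gap between ranges.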
+ if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+ size_t nextAlloc2ndIndex = 0;
+ while(lastOffset < freeSpace2ndTo1stEnd)
+ {
+ // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+ while(nextAlloc2ndIndex < suballoc2ndCount &&
+ suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++nextAlloc2ndIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc2ndIndex < suballoc2ndCount)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += unusedRangeSize;
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ outInfo.usedBytes += suballoc.size;
+ outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+ outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ ++nextAlloc2ndIndex;
+ }
+ // We are at the end.
+ else
+ {
+ // There is free space from lastOffset to freeSpace2ndTo1stEnd.
+ if(lastOffset < freeSpace2ndTo1stEnd)
+ {
+ const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += unusedRangeSize;
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = freeSpace2ndTo1stEnd;
+ }
+ }
+ }
+
+ size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+ const VkDeviceSize freeSpace1stTo2ndEnd =
+ m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+ while(lastOffset < freeSpace1stTo2ndEnd)
+ {
+ // Find next non-null allocation or move nextAlloc1stIndex to the end.
+ while(nextAlloc1stIndex < suballoc1stCount &&
+ suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++nextAlloc1stIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc1stIndex < suballoc1stCount)
+ {
+ const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += unusedRangeSize;
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ outInfo.usedBytes += suballoc.size;
+ outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+ outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ ++nextAlloc1stIndex;
+ }
+ // We are at the end.
+ else
+ {
+ // There is free space from lastOffset to freeSpace1stTo2ndEnd.
+ if(lastOffset < freeSpace1stTo2ndEnd)
+ {
+ const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += unusedRangeSize;
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = freeSpace1stTo2ndEnd;
+ }
+ }
+
+ if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+ {
+ size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+ while(lastOffset < size)
+ {
+ // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+ while(nextAlloc2ndIndex != SIZE_MAX &&
+ suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ --nextAlloc2ndIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc2ndIndex != SIZE_MAX)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += unusedRangeSize;
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ outInfo.usedBytes += suballoc.size;
+ outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
+ outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ --nextAlloc2ndIndex;
+ }
+ // We are at the end.
+ else
+ {
+ // There is free space from lastOffset to size.
+ if(lastOffset < size)
+ {
+ const VkDeviceSize unusedRangeSize = size - lastOffset;
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += unusedRangeSize;
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = size;
+ }
+ }
+ }
+
+ outInfo.unusedBytes = size - outInfo.usedBytes;
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const VkDeviceSize size = GetSize();
- const size_t suballoc1stCount = suballocations1st.size();
- const size_t suballoc2ndCount = suballocations2nd.size();
-
- inoutStats.size += size;
-
- VkDeviceSize lastOffset = 0;
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
- size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
- while(lastOffset < freeSpace2ndTo1stEnd)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex < suballoc2ndCount &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex < suballoc2ndCount)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++inoutStats.allocationCount;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace2ndTo1stEnd)
- {
- // There is free space from lastOffset to freeSpace2ndTo1stEnd.
- const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace2ndTo1stEnd;
- }
- }
- }
-
- size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
- const VkDeviceSize freeSpace1stTo2ndEnd =
- m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
- while(lastOffset < freeSpace1stTo2ndEnd)
- {
- // Find next non-null allocation or move nextAlloc1stIndex to the end.
- while(nextAlloc1stIndex < suballoc1stCount &&
- suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc1stIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc1stIndex < suballoc1stCount)
- {
- const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++inoutStats.allocationCount;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc1stIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace1stTo2ndEnd)
- {
- // There is free space from lastOffset to freeSpace1stTo2ndEnd.
- const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace1stTo2ndEnd;
- }
- }
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
- while(lastOffset < size)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex != SIZE_MAX &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- --nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex != SIZE_MAX)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++inoutStats.allocationCount;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- --nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < size)
- {
- // There is free space from lastOffset to size.
- const VkDeviceSize unusedRangeSize = size - lastOffset;
- inoutStats.unusedSize += unusedRangeSize;
- ++inoutStats.unusedRangeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = size;
- }
- }
- }
+ const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+ const VkDeviceSize size = GetSize();
+ const size_t suballoc1stCount = suballocations1st.size();
+ const size_t suballoc2ndCount = suballocations2nd.size();
+
+ inoutStats.size += size;
+
+ VkDeviceSize lastOffset = 0;
+
+ if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+ size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
+ while(lastOffset < freeSpace2ndTo1stEnd)
+ {
+ // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+ while(nextAlloc2ndIndex < suballoc2ndCount &&
+ suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++nextAlloc2ndIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc2ndIndex < suballoc2ndCount)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ inoutStats.unusedSize += unusedRangeSize;
+ ++inoutStats.unusedRangeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ ++inoutStats.allocationCount;
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ ++nextAlloc2ndIndex;
+ }
+ // We are at the end.
+ else
+ {
+ if(lastOffset < freeSpace2ndTo1stEnd)
+ {
+ // There is free space from lastOffset to freeSpace2ndTo1stEnd.
+ const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+ inoutStats.unusedSize += unusedRangeSize;
+ ++inoutStats.unusedRangeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = freeSpace2ndTo1stEnd;
+ }
+ }
+ }
+
+ size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+ const VkDeviceSize freeSpace1stTo2ndEnd =
+ m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+ while(lastOffset < freeSpace1stTo2ndEnd)
+ {
+        // Find next non-null allocation or move nextAlloc1stIndex to the end.
+ while(nextAlloc1stIndex < suballoc1stCount &&
+ suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++nextAlloc1stIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc1stIndex < suballoc1stCount)
+ {
+ const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ inoutStats.unusedSize += unusedRangeSize;
+ ++inoutStats.unusedRangeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ ++inoutStats.allocationCount;
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ ++nextAlloc1stIndex;
+ }
+ // We are at the end.
+ else
+ {
+ if(lastOffset < freeSpace1stTo2ndEnd)
+ {
+ // There is free space from lastOffset to freeSpace1stTo2ndEnd.
+ const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+ inoutStats.unusedSize += unusedRangeSize;
+ ++inoutStats.unusedRangeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = freeSpace1stTo2ndEnd;
+ }
+ }
+
+ if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+ {
+ size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+ while(lastOffset < size)
+ {
+ // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+ while(nextAlloc2ndIndex != SIZE_MAX &&
+ suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ --nextAlloc2ndIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc2ndIndex != SIZE_MAX)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ inoutStats.unusedSize += unusedRangeSize;
+ ++inoutStats.unusedRangeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ ++inoutStats.allocationCount;
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ --nextAlloc2ndIndex;
+ }
+ // We are at the end.
+ else
+ {
+ if(lastOffset < size)
+ {
+ // There is free space from lastOffset to size.
+ const VkDeviceSize unusedRangeSize = size - lastOffset;
+ inoutStats.unusedSize += unusedRangeSize;
+ ++inoutStats.unusedRangeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = size;
+ }
+ }
+ }
}
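The three loops above share one skeleton: skip freed (VK_NULL_HANDLE) slots, count the gap in front of each live allocation, then count the trailing gap. It is applied first to the 2nd vector in ring-buffer mode, then to the 1st vector, then to the 2nd vector in double-stack mode. A minimal standalone sketch of that skeleton, with hypothetical plain types in place of the VMA ones:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct Suballoc { uint64_t offset, size; bool isNull; }; // isNull ~ hAllocation == VK_NULL_HANDLE
    struct Stats
    {
        uint64_t unusedSize = 0, unusedRangeSizeMax = 0;
        size_t allocationCount = 0, unusedRangeCount = 0;
    };

    // Walk suballocations sorted by offset over [0, end) and accumulate stats.
    void WalkRange(const std::vector<Suballoc>& subs, uint64_t end, Stats& s)
    {
        uint64_t lastOffset = 0;
        size_t i = 0;
        while(lastOffset < end)
        {
            while(i < subs.size() && subs[i].isNull)
                ++i;                                   // skip freed slots
            if(i < subs.size())
            {
                if(lastOffset < subs[i].offset)        // gap before this allocation
                {
                    const uint64_t gap = subs[i].offset - lastOffset;
                    s.unusedSize += gap;
                    ++s.unusedRangeCount;
                    s.unusedRangeSizeMax = std::max(s.unusedRangeSizeMax, gap);
                }
                ++s.allocationCount;
                lastOffset = subs[i].offset + subs[i].size;
                ++i;
            }
            else                                       // trailing gap, then stop
            {
                if(lastOffset < end)
                {
                    const uint64_t gap = end - lastOffset;
                    s.unusedSize += gap;
                    ++s.unusedRangeCount;
                    s.unusedRangeSizeMax = std::max(s.unusedRangeSizeMax, gap);
                }
                lastOffset = end;
            }
        }
    }

For example, a 1024-byte range holding a single 256-byte allocation at offset 512 yields two unused ranges (512 and 256 bytes) and unusedRangeSizeMax == 512.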
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
- const VkDeviceSize size = GetSize();
- const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- const size_t suballoc1stCount = suballocations1st.size();
- const size_t suballoc2ndCount = suballocations2nd.size();
-
- // FIRST PASS
-
- size_t unusedRangeCount = 0;
- VkDeviceSize usedBytes = 0;
-
- VkDeviceSize lastOffset = 0;
-
- size_t alloc2ndCount = 0;
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
- size_t nextAlloc2ndIndex = 0;
- while(lastOffset < freeSpace2ndTo1stEnd)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex < suballoc2ndCount &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex < suballoc2ndCount)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- ++unusedRangeCount;
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++alloc2ndCount;
- usedBytes += suballoc.size;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace2ndTo1stEnd)
- {
- // There is free space from lastOffset to freeSpace2ndTo1stEnd.
- ++unusedRangeCount;
- }
-
- // End of loop.
- lastOffset = freeSpace2ndTo1stEnd;
- }
- }
- }
-
- size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
- size_t alloc1stCount = 0;
- const VkDeviceSize freeSpace1stTo2ndEnd =
- m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
- while(lastOffset < freeSpace1stTo2ndEnd)
- {
- // Find next non-null allocation or move nextAllocIndex to the end.
- while(nextAlloc1stIndex < suballoc1stCount &&
- suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc1stIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc1stIndex < suballoc1stCount)
- {
- const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- ++unusedRangeCount;
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++alloc1stCount;
- usedBytes += suballoc.size;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc1stIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < size)
- {
- // There is free space from lastOffset to freeSpace1stTo2ndEnd.
- ++unusedRangeCount;
- }
-
- // End of loop.
- lastOffset = freeSpace1stTo2ndEnd;
- }
- }
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
- while(lastOffset < size)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex != SIZE_MAX &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- --nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex != SIZE_MAX)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- ++unusedRangeCount;
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- ++alloc2ndCount;
- usedBytes += suballoc.size;
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- --nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < size)
- {
- // There is free space from lastOffset to size.
- ++unusedRangeCount;
- }
-
- // End of loop.
- lastOffset = size;
- }
- }
- }
-
- const VkDeviceSize unusedBytes = size - usedBytes;
- PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
-
- // SECOND PASS
- lastOffset = 0;
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
- size_t nextAlloc2ndIndex = 0;
- while(lastOffset < freeSpace2ndTo1stEnd)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex < suballoc2ndCount &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex < suballoc2ndCount)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace2ndTo1stEnd)
- {
- // There is free space from lastOffset to freeSpace2ndTo1stEnd.
- const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace2ndTo1stEnd;
- }
- }
- }
-
- nextAlloc1stIndex = m_1stNullItemsBeginCount;
- while(lastOffset < freeSpace1stTo2ndEnd)
- {
- // Find next non-null allocation or move nextAllocIndex to the end.
- while(nextAlloc1stIndex < suballoc1stCount &&
- suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++nextAlloc1stIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc1stIndex < suballoc1stCount)
- {
- const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- ++nextAlloc1stIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < freeSpace1stTo2ndEnd)
- {
- // There is free space from lastOffset to freeSpace1stTo2ndEnd.
- const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = freeSpace1stTo2ndEnd;
- }
- }
-
- if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
- while(lastOffset < size)
- {
- // Find next non-null allocation or move nextAlloc2ndIndex to the end.
- while(nextAlloc2ndIndex != SIZE_MAX &&
- suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
- {
- --nextAlloc2ndIndex;
- }
-
- // Found non-null allocation.
- if(nextAlloc2ndIndex != SIZE_MAX)
- {
- const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
-
- // 1. Process free space before this allocation.
- if(lastOffset < suballoc.offset)
- {
- // There is free space from lastOffset to suballoc.offset.
- const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // 2. Process this allocation.
- // There is allocation with suballoc.offset, suballoc.size.
- PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
-
- // 3. Prepare for next iteration.
- lastOffset = suballoc.offset + suballoc.size;
- --nextAlloc2ndIndex;
- }
- // We are at the end.
- else
- {
- if(lastOffset < size)
- {
- // There is free space from lastOffset to size.
- const VkDeviceSize unusedRangeSize = size - lastOffset;
- PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
- }
-
- // End of loop.
- lastOffset = size;
- }
- }
- }
-
- PrintDetailedMap_End(json);
+ const VkDeviceSize size = GetSize();
+ const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+ const size_t suballoc1stCount = suballocations1st.size();
+ const size_t suballoc2ndCount = suballocations2nd.size();
+
+ // FIRST PASS
+
+ size_t unusedRangeCount = 0;
+ VkDeviceSize usedBytes = 0;
+
+ VkDeviceSize lastOffset = 0;
+
+ size_t alloc2ndCount = 0;
+ if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+ size_t nextAlloc2ndIndex = 0;
+ while(lastOffset < freeSpace2ndTo1stEnd)
+ {
+ // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+ while(nextAlloc2ndIndex < suballoc2ndCount &&
+ suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++nextAlloc2ndIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc2ndIndex < suballoc2ndCount)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ ++unusedRangeCount;
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ ++alloc2ndCount;
+ usedBytes += suballoc.size;
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ ++nextAlloc2ndIndex;
+ }
+ // We are at the end.
+ else
+ {
+ if(lastOffset < freeSpace2ndTo1stEnd)
+ {
+ // There is free space from lastOffset to freeSpace2ndTo1stEnd.
+ ++unusedRangeCount;
+ }
+
+ // End of loop.
+ lastOffset = freeSpace2ndTo1stEnd;
+ }
+ }
+ }
+
+ size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
+ size_t alloc1stCount = 0;
+ const VkDeviceSize freeSpace1stTo2ndEnd =
+ m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
+ while(lastOffset < freeSpace1stTo2ndEnd)
+ {
+        // Find next non-null allocation or move nextAlloc1stIndex to the end.
+ while(nextAlloc1stIndex < suballoc1stCount &&
+ suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++nextAlloc1stIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc1stIndex < suballoc1stCount)
+ {
+ const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ ++unusedRangeCount;
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ ++alloc1stCount;
+ usedBytes += suballoc.size;
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ ++nextAlloc1stIndex;
+ }
+ // We are at the end.
+ else
+ {
+            if(lastOffset < freeSpace1stTo2ndEnd)
+ {
+ // There is free space from lastOffset to freeSpace1stTo2ndEnd.
+ ++unusedRangeCount;
+ }
+
+ // End of loop.
+ lastOffset = freeSpace1stTo2ndEnd;
+ }
+ }
+
+ if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+ {
+ size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+ while(lastOffset < size)
+ {
+ // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+ while(nextAlloc2ndIndex != SIZE_MAX &&
+ suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ --nextAlloc2ndIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc2ndIndex != SIZE_MAX)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ ++unusedRangeCount;
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ ++alloc2ndCount;
+ usedBytes += suballoc.size;
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ --nextAlloc2ndIndex;
+ }
+ // We are at the end.
+ else
+ {
+ if(lastOffset < size)
+ {
+ // There is free space from lastOffset to size.
+ ++unusedRangeCount;
+ }
+
+ // End of loop.
+ lastOffset = size;
+ }
+ }
+ }
+
+ const VkDeviceSize unusedBytes = size - usedBytes;
+ PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
+
+ // SECOND PASS
+ lastOffset = 0;
+
+ if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
+ size_t nextAlloc2ndIndex = 0;
+ while(lastOffset < freeSpace2ndTo1stEnd)
+ {
+ // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+ while(nextAlloc2ndIndex < suballoc2ndCount &&
+ suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++nextAlloc2ndIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc2ndIndex < suballoc2ndCount)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ ++nextAlloc2ndIndex;
+ }
+ // We are at the end.
+ else
+ {
+ if(lastOffset < freeSpace2ndTo1stEnd)
+ {
+ // There is free space from lastOffset to freeSpace2ndTo1stEnd.
+ const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
+ PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = freeSpace2ndTo1stEnd;
+ }
+ }
+ }
+
+ nextAlloc1stIndex = m_1stNullItemsBeginCount;
+ while(lastOffset < freeSpace1stTo2ndEnd)
+ {
+        // Find next non-null allocation or move nextAlloc1stIndex to the end.
+ while(nextAlloc1stIndex < suballoc1stCount &&
+ suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++nextAlloc1stIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc1stIndex < suballoc1stCount)
+ {
+ const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ ++nextAlloc1stIndex;
+ }
+ // We are at the end.
+ else
+ {
+ if(lastOffset < freeSpace1stTo2ndEnd)
+ {
+ // There is free space from lastOffset to freeSpace1stTo2ndEnd.
+ const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
+ PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = freeSpace1stTo2ndEnd;
+ }
+ }
+
+ if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+ {
+ size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
+ while(lastOffset < size)
+ {
+ // Find next non-null allocation or move nextAlloc2ndIndex to the end.
+ while(nextAlloc2ndIndex != SIZE_MAX &&
+ suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ --nextAlloc2ndIndex;
+ }
+
+ // Found non-null allocation.
+ if(nextAlloc2ndIndex != SIZE_MAX)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
+
+ // 1. Process free space before this allocation.
+ if(lastOffset < suballoc.offset)
+ {
+ // There is free space from lastOffset to suballoc.offset.
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
+ PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+ }
+
+ // 2. Process this allocation.
+ // There is allocation with suballoc.offset, suballoc.size.
+ PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
+
+ // 3. Prepare for next iteration.
+ lastOffset = suballoc.offset + suballoc.size;
+ --nextAlloc2ndIndex;
+ }
+ // We are at the end.
+ else
+ {
+ if(lastOffset < size)
+ {
+ // There is free space from lastOffset to size.
+ const VkDeviceSize unusedRangeSize = size - lastOffset;
+ PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
+ }
+
+ // End of loop.
+ lastOffset = size;
+ }
+ }
+ }
+
+ PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
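PrintDetailedMap above runs the same three-loop walk twice: the first pass only counts allocations and unused ranges so that PrintDetailedMap_Begin can emit the summary header up front, and the second pass emits the individual entries. A toy sketch of that two-pass pattern, with a hypothetical Entry type and plain printf in place of VmaJsonWriter, and the walk collapsed to a flat list:

    #include <cstdio>
    #include <vector>

    struct Entry { unsigned long long offset, size; bool used; };

    void PrintMap(const std::vector<Entry>& entries)
    {
        // Pass 1: totals only, so the header can be written first.
        size_t allocCount = 0, freeCount = 0;
        for(const Entry& e : entries)
        {
            if(e.used) ++allocCount; else ++freeCount;
        }
        std::printf("{ \"Allocations\": %zu, \"UnusedRanges\": %zu, \"Items\": [\n",
            allocCount, freeCount);
        // Pass 2: stream the individual items.
        for(size_t i = 0; i < entries.size(); ++i)
        {
            const Entry& e = entries[i];
            std::printf("  { \"Offset\": %llu, \"Size\": %llu, \"Type\": \"%s\" }%s\n",
                e.offset, e.size, e.used ? "ALLOCATION" : "FREE",
                i + 1 < entries.size() ? "," : "");
        }
        std::printf("] }\n");
    }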
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest)
-{
- VMA_ASSERT(allocSize > 0);
- VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(pAllocationRequest != VMA_NULL);
- VMA_HEAVY_ASSERT(Validate());
- return upperAddress ?
- CreateAllocationRequest_UpperAddress(
- currentFrameIndex, frameInUseCount, bufferImageGranularity,
- allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
- CreateAllocationRequest_LowerAddress(
- currentFrameIndex, frameInUseCount, bufferImageGranularity,
- allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ bool upperAddress,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest)
+{
+ VMA_ASSERT(allocSize > 0);
+ VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
+ VMA_ASSERT(pAllocationRequest != VMA_NULL);
+ VMA_HEAVY_ASSERT(Validate());
+ return upperAddress ?
+ CreateAllocationRequest_UpperAddress(
+ currentFrameIndex, frameInUseCount, bufferImageGranularity,
+ allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
+ CreateAllocationRequest_LowerAddress(
+ currentFrameIndex, frameInUseCount, bufferImageGranularity,
+ allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
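A hypothetical caller-side sketch (not part of this patch) of how the request/commit split is used: a request is created first and, only if that succeeds, committed with Alloc(). The wrapper name and argument values are invented; the signatures follow the code above.

    // Assumed context: metadata is a VmaBlockMetadata_Linear and hAllocation
    // a live VmaAllocation, both owned by the surrounding allocator code.
    bool TryLinearAlloc(VmaBlockMetadata_Linear& metadata,
        uint32_t frameIndex, uint32_t frameInUseCount,
        VkDeviceSize granularity, VkDeviceSize allocSize,
        VkDeviceSize alignment, VmaAllocation hAllocation)
    {
        VmaAllocationRequest request = {};
        if(!metadata.CreateAllocationRequest(
            frameIndex, frameInUseCount, granularity, allocSize, alignment,
            /*upperAddress=*/false,            // bottom of the block (1st vector)
            VMA_SUBALLOCATION_TYPE_BUFFER,
            /*canMakeOtherLost=*/false, /*strategy=*/0, &request))
        {
            return false;
        }
        metadata.Alloc(request, VMA_SUBALLOCATION_TYPE_BUFFER, allocSize, hAllocation);
        return true;
    }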
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest)
-{
- const VkDeviceSize size = GetSize();
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
- return false;
- }
-
- // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
- if(allocSize > size)
- {
- return false;
- }
- VkDeviceSize resultBaseOffset = size - allocSize;
- if(!suballocations2nd.empty())
- {
- const VmaSuballocation& lastSuballoc = suballocations2nd.back();
- resultBaseOffset = lastSuballoc.offset - allocSize;
- if(allocSize > lastSuballoc.offset)
- {
- return false;
- }
- }
-
- // Start from offset equal to end of free space.
- VkDeviceSize resultOffset = resultBaseOffset;
-
- // Apply VMA_DEBUG_MARGIN at the end.
- if(VMA_DEBUG_MARGIN > 0)
- {
- if(resultOffset < VMA_DEBUG_MARGIN)
- {
- return false;
- }
- resultOffset -= VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- resultOffset = VmaAlignDown(resultOffset, allocAlignment);
-
- // Check next suballocations from 2nd for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1 && !suballocations2nd.empty())
- {
- bool bufferImageGranularityConflict = false;
- for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
- {
- const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
- if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- // Already on previous page.
- break;
- }
- if(bufferImageGranularityConflict)
- {
- resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
- }
- }
-
- // There is enough free space.
- const VkDeviceSize endOf1st = !suballocations1st.empty() ?
- suballocations1st.back().offset + suballocations1st.back().size :
- 0;
- if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
- {
- // Check previous suballocations for BufferImageGranularity conflicts.
- // If conflict exists, allocation cannot be made here.
- if(bufferImageGranularity > 1)
- {
- for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
- {
- const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
- {
- return false;
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- }
- }
-
- // All tests passed: Success.
- pAllocationRequest->offset = resultOffset;
- pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
- pAllocationRequest->sumItemSize = 0;
- // pAllocationRequest->item unused.
- pAllocationRequest->itemsToMakeLostCount = 0;
- pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
- return true;
- }
-
- return false;
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest)
+{
+ const VkDeviceSize size = GetSize();
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+ if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
+ return false;
+ }
+
+ // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
+ if(allocSize > size)
+ {
+ return false;
+ }
+ VkDeviceSize resultBaseOffset = size - allocSize;
+ if(!suballocations2nd.empty())
+ {
+ const VmaSuballocation& lastSuballoc = suballocations2nd.back();
+ resultBaseOffset = lastSuballoc.offset - allocSize;
+ if(allocSize > lastSuballoc.offset)
+ {
+ return false;
+ }
+ }
+
+ // Start from offset equal to end of free space.
+ VkDeviceSize resultOffset = resultBaseOffset;
+
+ // Apply VMA_DEBUG_MARGIN at the end.
+ if(VMA_DEBUG_MARGIN > 0)
+ {
+ if(resultOffset < VMA_DEBUG_MARGIN)
+ {
+ return false;
+ }
+ resultOffset -= VMA_DEBUG_MARGIN;
+ }
+
+ // Apply alignment.
+ resultOffset = VmaAlignDown(resultOffset, allocAlignment);
+
+ // Check next suballocations from 2nd for BufferImageGranularity conflicts.
+ // Make bigger alignment if necessary.
+ if(bufferImageGranularity > 1 && !suballocations2nd.empty())
+ {
+ bool bufferImageGranularityConflict = false;
+ for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
+ {
+ const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
+ if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
+ {
+ bufferImageGranularityConflict = true;
+ break;
+ }
+ }
+ else
+                // Already on next page.
+ break;
+ }
+ if(bufferImageGranularityConflict)
+ {
+ resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
+ }
+ }
+
+ // There is enough free space.
+ const VkDeviceSize endOf1st = !suballocations1st.empty() ?
+ suballocations1st.back().offset + suballocations1st.back().size :
+ 0;
+ if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
+ {
+ // Check previous suballocations for BufferImageGranularity conflicts.
+ // If conflict exists, allocation cannot be made here.
+ if(bufferImageGranularity > 1)
+ {
+ for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
+ {
+ const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
+ if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
+ {
+ return false;
+ }
+ }
+ else
+ {
+ // Already on next page.
+                    // Already on previous page.
+ }
+ }
+ }
+
+ // All tests passed: Success.
+ pAllocationRequest->offset = resultOffset;
+ pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
+ pAllocationRequest->sumItemSize = 0;
+ // pAllocationRequest->item unused.
+ pAllocationRequest->itemsToMakeLostCount = 0;
+ pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
+ return true;
+ }
+
+ return false;
}
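Stripped of the granularity checks, the upper-address placement above reduces to: start at the lowest offset of the 2nd (top) stack, step down by the allocation size and the debug margin, align down, and accept only if the result still clears the end of the 1st (bottom) stack. A self-contained sketch under those assumptions (power-of-two alignment; AlignDown reimplemented locally):

    #include <cstdint>

    static uint64_t AlignDown(uint64_t v, uint64_t a) { return v & ~(a - 1); }

    // topOf2nd: lowest offset used by the top stack, or the block size if that
    // stack is empty. endOf1st: first byte past the bottom stack, or 0.
    bool PlaceFromTop(uint64_t endOf1st, uint64_t topOf2nd, uint64_t allocSize,
        uint64_t alignment, uint64_t margin, uint64_t& outOffset)
    {
        if(allocSize > topOf2nd)
            return false;                        // would start below offset 0
        uint64_t offset = topOf2nd - allocSize;  // right below the top stack
        if(offset < margin)
            return false;
        offset = AlignDown(offset - margin, alignment);
        if(endOf1st + margin > offset)
            return false;                        // collides with the bottom stack
        outOffset = offset;
        return true;
    }

With a 1024-byte block, endOf1st == 256, an empty top stack (topOf2nd == 1024), allocSize == 100, alignment == 64 and no margin, the candidate 924 aligns down to 896 and succeeds.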
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest)
-{
- const VkDeviceSize size = GetSize();
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- // Try to allocate at the end of 1st vector.
-
- VkDeviceSize resultBaseOffset = 0;
- if(!suballocations1st.empty())
- {
- const VmaSuballocation& lastSuballoc = suballocations1st.back();
- resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
- }
-
- // Start from offset equal to beginning of free space.
- VkDeviceSize resultOffset = resultBaseOffset;
-
- // Apply VMA_DEBUG_MARGIN at the beginning.
- if(VMA_DEBUG_MARGIN > 0)
- {
- resultOffset += VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- resultOffset = VmaAlignUp(resultOffset, allocAlignment);
-
- // Check previous suballocations for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1 && !suballocations1st.empty())
- {
- bool bufferImageGranularityConflict = false;
- for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
- {
- const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- // Already on previous page.
- break;
- }
- if(bufferImageGranularityConflict)
- {
- resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
- }
- }
-
- const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
- suballocations2nd.back().offset : size;
-
- // There is enough free space at the end after alignment.
- if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
- {
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, allocation cannot be made here.
- if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
- {
- const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
- if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
- {
- return false;
- }
- }
- else
- {
- // Already on previous page.
- break;
- }
- }
- }
-
- // All tests passed: Success.
- pAllocationRequest->offset = resultOffset;
- pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
- pAllocationRequest->sumItemSize = 0;
- // pAllocationRequest->item, customData unused.
- pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
- pAllocationRequest->itemsToMakeLostCount = 0;
- return true;
- }
- }
-
- // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
- // beginning of 1st vector as the end of free space.
- if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- VMA_ASSERT(!suballocations1st.empty());
-
- VkDeviceSize resultBaseOffset = 0;
- if(!suballocations2nd.empty())
- {
- const VmaSuballocation& lastSuballoc = suballocations2nd.back();
- resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
- }
-
- // Start from offset equal to beginning of free space.
- VkDeviceSize resultOffset = resultBaseOffset;
-
- // Apply VMA_DEBUG_MARGIN at the beginning.
- if(VMA_DEBUG_MARGIN > 0)
- {
- resultOffset += VMA_DEBUG_MARGIN;
- }
-
- // Apply alignment.
- resultOffset = VmaAlignUp(resultOffset, allocAlignment);
-
- // Check previous suballocations for BufferImageGranularity conflicts.
- // Make bigger alignment if necessary.
- if(bufferImageGranularity > 1 && !suballocations2nd.empty())
- {
- bool bufferImageGranularityConflict = false;
- for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
- {
- const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
- if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
- {
- bufferImageGranularityConflict = true;
- break;
- }
- }
- else
- // Already on previous page.
- break;
- }
- if(bufferImageGranularityConflict)
- {
- resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
- }
- }
-
- pAllocationRequest->itemsToMakeLostCount = 0;
- pAllocationRequest->sumItemSize = 0;
- size_t index1st = m_1stNullItemsBeginCount;
-
- if(canMakeOtherLost)
- {
- while(index1st < suballocations1st.size() &&
- resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
- {
- // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
- const VmaSuballocation& suballoc = suballocations1st[index1st];
- if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- // No problem.
- }
- else
- {
- VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
- if(suballoc.hAllocation->CanBecomeLost() &&
- suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++pAllocationRequest->itemsToMakeLostCount;
- pAllocationRequest->sumItemSize += suballoc.size;
- }
- else
- {
- return false;
- }
- }
- ++index1st;
- }
-
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, we must mark more allocations lost or fail.
- if(bufferImageGranularity > 1)
- {
- while(index1st < suballocations1st.size())
- {
- const VmaSuballocation& suballoc = suballocations1st[index1st];
- if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
- {
- if(suballoc.hAllocation != VK_NULL_HANDLE)
- {
- // Not checking actual VmaIsBufferImageGranularityConflict(allocType, suballoc.type).
- if(suballoc.hAllocation->CanBecomeLost() &&
- suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
- {
- ++pAllocationRequest->itemsToMakeLostCount;
- pAllocationRequest->sumItemSize += suballoc.size;
- }
- else
- {
- return false;
- }
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- ++index1st;
- }
- }
-
- // Special case: There is not enough room at the end for this allocation, even after making all from the 1st lost.
- if(index1st == suballocations1st.size() &&
- resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
- {
- // TODO: This is a known bug that it's not yet implemented and the allocation is failing.
- VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
- }
- }
-
- // There is enough free space at the end after alignment.
- if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
- (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
- {
- // Check next suballocations for BufferImageGranularity conflicts.
- // If conflict exists, allocation cannot be made here.
- if(bufferImageGranularity > 1)
- {
- for(size_t nextSuballocIndex = index1st;
- nextSuballocIndex < suballocations1st.size();
- nextSuballocIndex++)
- {
- const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
- if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
- {
- if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
- {
- return false;
- }
- }
- else
- {
- // Already on next page.
- break;
- }
- }
- }
-
- // All tests passed: Success.
- pAllocationRequest->offset = resultOffset;
- pAllocationRequest->sumFreeSize =
- (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
- - resultBaseOffset
- - pAllocationRequest->sumItemSize;
- pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
- // pAllocationRequest->item, customData unused.
- return true;
- }
- }
-
- return false;
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest)
+{
+ const VkDeviceSize size = GetSize();
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+ if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+ {
+ // Try to allocate at the end of 1st vector.
+
+ VkDeviceSize resultBaseOffset = 0;
+ if(!suballocations1st.empty())
+ {
+ const VmaSuballocation& lastSuballoc = suballocations1st.back();
+ resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
+ }
+
+ // Start from offset equal to beginning of free space.
+ VkDeviceSize resultOffset = resultBaseOffset;
+
+ // Apply VMA_DEBUG_MARGIN at the beginning.
+ if(VMA_DEBUG_MARGIN > 0)
+ {
+ resultOffset += VMA_DEBUG_MARGIN;
+ }
+
+ // Apply alignment.
+ resultOffset = VmaAlignUp(resultOffset, allocAlignment);
+
+ // Check previous suballocations for BufferImageGranularity conflicts.
+ // Make bigger alignment if necessary.
+ if(bufferImageGranularity > 1 && !suballocations1st.empty())
+ {
+ bool bufferImageGranularityConflict = false;
+ for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
+ {
+ const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
+ if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+ {
+ bufferImageGranularityConflict = true;
+ break;
+ }
+ }
+ else
+ // Already on previous page.
+ break;
+ }
+ if(bufferImageGranularityConflict)
+ {
+ resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
+ }
+ }
+
+ const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
+ suballocations2nd.back().offset : size;
+
+ // There is enough free space at the end after alignment.
+ if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
+ {
+ // Check next suballocations for BufferImageGranularity conflicts.
+ // If conflict exists, allocation cannot be made here.
+ if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+ {
+ for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
+ {
+ const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
+ if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+ {
+ return false;
+ }
+ }
+ else
+ {
+                    // Already on next page.
+ break;
+ }
+ }
+ }
+
+ // All tests passed: Success.
+ pAllocationRequest->offset = resultOffset;
+ pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
+ pAllocationRequest->sumItemSize = 0;
+ // pAllocationRequest->item, customData unused.
+ pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
+ pAllocationRequest->itemsToMakeLostCount = 0;
+ return true;
+ }
+ }
+
+ // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
+ // beginning of 1st vector as the end of free space.
+ if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ VMA_ASSERT(!suballocations1st.empty());
+
+ VkDeviceSize resultBaseOffset = 0;
+ if(!suballocations2nd.empty())
+ {
+ const VmaSuballocation& lastSuballoc = suballocations2nd.back();
+ resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
+ }
+
+ // Start from offset equal to beginning of free space.
+ VkDeviceSize resultOffset = resultBaseOffset;
+
+ // Apply VMA_DEBUG_MARGIN at the beginning.
+ if(VMA_DEBUG_MARGIN > 0)
+ {
+ resultOffset += VMA_DEBUG_MARGIN;
+ }
+
+ // Apply alignment.
+ resultOffset = VmaAlignUp(resultOffset, allocAlignment);
+
+ // Check previous suballocations for BufferImageGranularity conflicts.
+ // Make bigger alignment if necessary.
+ if(bufferImageGranularity > 1 && !suballocations2nd.empty())
+ {
+ bool bufferImageGranularityConflict = false;
+ for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
+ {
+ const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
+ if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
+ {
+ bufferImageGranularityConflict = true;
+ break;
+ }
+ }
+ else
+ // Already on previous page.
+ break;
+ }
+ if(bufferImageGranularityConflict)
+ {
+ resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
+ }
+ }
+
+ pAllocationRequest->itemsToMakeLostCount = 0;
+ pAllocationRequest->sumItemSize = 0;
+ size_t index1st = m_1stNullItemsBeginCount;
+
+ if(canMakeOtherLost)
+ {
+ while(index1st < suballocations1st.size() &&
+ resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
+ {
+ // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
+ const VmaSuballocation& suballoc = suballocations1st[index1st];
+ if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ // No problem.
+ }
+ else
+ {
+ VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
+ if(suballoc.hAllocation->CanBecomeLost() &&
+ suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+ {
+ ++pAllocationRequest->itemsToMakeLostCount;
+ pAllocationRequest->sumItemSize += suballoc.size;
+ }
+ else
+ {
+ return false;
+ }
+ }
+ ++index1st;
+ }
+
+ // Check next suballocations for BufferImageGranularity conflicts.
+ // If conflict exists, we must mark more allocations lost or fail.
+ if(bufferImageGranularity > 1)
+ {
+ while(index1st < suballocations1st.size())
+ {
+ const VmaSuballocation& suballoc = suballocations1st[index1st];
+ if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
+ {
+ if(suballoc.hAllocation != VK_NULL_HANDLE)
+ {
+                        // Conservative: treat it as a potential conflict without checking the actual VmaIsBufferImageGranularityConflict(allocType, suballoc.type).
+ if(suballoc.hAllocation->CanBecomeLost() &&
+ suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
+ {
+ ++pAllocationRequest->itemsToMakeLostCount;
+ pAllocationRequest->sumItemSize += suballoc.size;
+ }
+ else
+ {
+ return false;
+ }
+ }
+ }
+ else
+ {
+ // Already on next page.
+ break;
+ }
+ ++index1st;
+ }
+ }
+
+ // Special case: There is not enough room at the end for this allocation, even after making all from the 1st lost.
+ if(index1st == suballocations1st.size() &&
+ resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
+ {
+                // TODO: Known limitation, not implemented yet: in this case the allocation simply fails.
+ VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
+ }
+ }
+
+ // There is enough free space at the end after alignment.
+ if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
+ (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
+ {
+ // Check next suballocations for BufferImageGranularity conflicts.
+ // If conflict exists, allocation cannot be made here.
+ if(bufferImageGranularity > 1)
+ {
+ for(size_t nextSuballocIndex = index1st;
+ nextSuballocIndex < suballocations1st.size();
+ nextSuballocIndex++)
+ {
+ const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
+ if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
+ {
+ if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
+ {
+ return false;
+ }
+ }
+ else
+ {
+ // Already on next page.
+ break;
+ }
+ }
+ }
+
+ // All tests passed: Success.
+ pAllocationRequest->offset = resultOffset;
+ pAllocationRequest->sumFreeSize =
+ (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
+ - resultBaseOffset
+ - pAllocationRequest->sumItemSize;
+ pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
+ // pAllocationRequest->item, customData unused.
+ return true;
+ }
+ }
+
+ return false;
}
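The first branch above (allocating at the end of the 1st vector) follows the mirror-image rule: start right after the last allocation, add the debug margin, align up, and require that the allocation plus trailing margin still fits below freeSpaceEnd (the bottom of the 2nd stack, or the block size). A minimal sketch of just that branch, with AlignUp reimplemented locally and power-of-two alignment assumed; the ring-buffer branch additionally handles wrap-around and lost allocations:

    #include <cstdint>

    static uint64_t AlignUp(uint64_t v, uint64_t a) { return (v + a - 1) & ~(a - 1); }

    bool PlaceAtEndOf1st(uint64_t endOf1st, uint64_t freeSpaceEnd,
        uint64_t allocSize, uint64_t alignment, uint64_t margin,
        uint64_t& outOffset)
    {
        const uint64_t offset = AlignUp(endOf1st + margin, alignment);
        if(offset + allocSize + margin > freeSpaceEnd)
            return false;                 // does not fit before the 2nd stack
        outOffset = offset;
        return true;
    }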
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest)
-{
- if(pAllocationRequest->itemsToMakeLostCount == 0)
- {
- return true;
- }
-
- VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
-
- // We always start from 1st.
- SuballocationVectorType* suballocations = &AccessSuballocations1st();
- size_t index = m_1stNullItemsBeginCount;
- size_t madeLostCount = 0;
- while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
- {
- if(index == suballocations->size())
- {
- index = 0;
- // If we get to the end of 1st, we wrap around to beginning of 2nd of 1st.
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- suballocations = &AccessSuballocations2nd();
- }
- // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY:
- // suballocations continues pointing at AccessSuballocations1st().
- VMA_ASSERT(!suballocations->empty());
- }
- VmaSuballocation& suballoc = (*suballocations)[index];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
- VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
- if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
- m_SumFreeSize += suballoc.size;
- if(suballocations == &AccessSuballocations1st())
- {
- ++m_1stNullItemsMiddleCount;
- }
- else
- {
- ++m_2ndNullItemsCount;
- }
- ++madeLostCount;
- }
- else
- {
- return false;
- }
- }
- ++index;
- }
-
- CleanupAfterFree();
- //VMA_HEAVY_ASSERT(Validate()); // Already called by ClanupAfterFree().
-
- return true;
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest)
+{
+ if(pAllocationRequest->itemsToMakeLostCount == 0)
+ {
+ return true;
+ }
+
+ VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
+
+ // We always start from 1st.
+ SuballocationVectorType* suballocations = &AccessSuballocations1st();
+ size_t index = m_1stNullItemsBeginCount;
+ size_t madeLostCount = 0;
+ while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
+ {
+ if(index == suballocations->size())
+ {
+ index = 0;
+            // If we get to the end of 1st, we wrap around to the beginning of 2nd.
+ if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ suballocations = &AccessSuballocations2nd();
+ }
+ // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY:
+ // suballocations continues pointing at AccessSuballocations1st().
+ VMA_ASSERT(!suballocations->empty());
+ }
+ VmaSuballocation& suballoc = (*suballocations)[index];
+ if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
+ VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
+ if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+ {
+ suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ suballoc.hAllocation = VK_NULL_HANDLE;
+ m_SumFreeSize += suballoc.size;
+ if(suballocations == &AccessSuballocations1st())
+ {
+ ++m_1stNullItemsMiddleCount;
+ }
+ else
+ {
+ ++m_2ndNullItemsCount;
+ }
+ ++madeLostCount;
+ }
+ else
+ {
+ return false;
+ }
+ }
+ ++index;
+ }
+
+ CleanupAfterFree();
+    //VMA_HEAVY_ASSERT(Validate()); // Already called by CleanupAfterFree().
+
+ return true;
}
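The wrap-around walk above, as a toy standalone model: iterate the 1st vector from its first potentially-live item and, in ring-buffer mode, continue into the 2nd vector, freeing items until the requested count is reached. Types are hypothetical; like the real code (which relies on m_1stNullItemsBeginCount), it assumes items before startIndex are already free, and it adds an explicit availability guard where the real code asserts instead:

    #include <cstddef>
    #include <vector>

    struct Item { bool free; };

    bool MakeLost(std::vector<Item>& first, std::vector<Item>& second,
        size_t startIndex, size_t countToFree, bool ringBuffer)
    {
        // Guard: enough freeable items must exist on the path we will walk.
        size_t available = 0;
        for(size_t i = startIndex; i < first.size(); ++i)
            available += first[i].free ? 0 : 1;
        if(ringBuffer)
            for(const Item& it : second)
                available += it.free ? 0 : 1;
        if(available < countToFree)
            return false;

        std::vector<Item>* v = &first;
        size_t index = startIndex, freed = 0;
        while(freed < countToFree)
        {
            if(index == v->size())
            {
                index = 0;               // wrap to the beginning...
                if(ringBuffer)
                    v = &second;         // ...of the 2nd vector in ring mode
            }
            if(!(*v)[index].free)
            {
                (*v)[index].free = true; // ~ MakeLost() plus null-item bookkeeping
                ++freed;
            }
            ++index;
        }
        return true;
    }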
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
- uint32_t lostAllocationCount = 0;
-
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
- {
- VmaSuballocation& suballoc = suballocations1st[i];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
- suballoc.hAllocation->CanBecomeLost() &&
- suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
- ++m_1stNullItemsMiddleCount;
- m_SumFreeSize += suballoc.size;
- ++lostAllocationCount;
- }
- }
-
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
- {
- VmaSuballocation& suballoc = suballocations2nd[i];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
- suballoc.hAllocation->CanBecomeLost() &&
- suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
- {
- suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- suballoc.hAllocation = VK_NULL_HANDLE;
- ++m_2ndNullItemsCount;
- m_SumFreeSize += suballoc.size;
- ++lostAllocationCount;
- }
- }
-
- if(lostAllocationCount)
- {
- CleanupAfterFree();
- }
-
- return lostAllocationCount;
+ uint32_t lostAllocationCount = 0;
+
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
+ {
+ VmaSuballocation& suballoc = suballocations1st[i];
+ if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
+ suballoc.hAllocation->CanBecomeLost() &&
+ suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+ {
+ suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ suballoc.hAllocation = VK_NULL_HANDLE;
+ ++m_1stNullItemsMiddleCount;
+ m_SumFreeSize += suballoc.size;
+ ++lostAllocationCount;
+ }
+ }
+
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+ for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
+ {
+ VmaSuballocation& suballoc = suballocations2nd[i];
+ if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
+ suballoc.hAllocation->CanBecomeLost() &&
+ suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
+ {
+ suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ suballoc.hAllocation = VK_NULL_HANDLE;
+ ++m_2ndNullItemsCount;
+ m_SumFreeSize += suballoc.size;
+ ++lostAllocationCount;
+ }
+ }
+
+ if(lostAllocationCount)
+ {
+ CleanupAfterFree();
+ }
+
+ return lostAllocationCount;
}
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
- {
- const VmaSuballocation& suballoc = suballocations1st[i];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- }
- }
-
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
- {
- const VmaSuballocation& suballoc = suballocations2nd[i];
- if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- }
- }
-
- return VK_SUCCESS;
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
+ {
+ const VmaSuballocation& suballoc = suballocations1st[i];
+ if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
+ {
+ VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
+ {
+ VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ }
+ }
+
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+ for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
+ {
+ const VmaSuballocation& suballoc = suballocations2nd[i];
+ if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
+ {
+ VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
+ {
+ VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ }
+ }
+
+ return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation)
-{
- const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
-
- switch(request.type)
- {
- case VmaAllocationRequestType::UpperAddress:
- {
- VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
- "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
- suballocations2nd.push_back(newSuballoc);
- m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
- }
- break;
- case VmaAllocationRequestType::EndOf1st:
- {
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
-
- VMA_ASSERT(suballocations1st.empty() ||
- request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
- // Check if it fits before the end of the block.
- VMA_ASSERT(request.offset + allocSize <= GetSize());
-
- suballocations1st.push_back(newSuballoc);
- }
- break;
- case VmaAllocationRequestType::EndOf2nd:
- {
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
- VMA_ASSERT(!suballocations1st.empty() &&
- request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- switch(m_2ndVectorMode)
- {
- case SECOND_VECTOR_EMPTY:
- // First allocation from second part ring buffer.
- VMA_ASSERT(suballocations2nd.empty());
- m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
- break;
- case SECOND_VECTOR_RING_BUFFER:
- // 2-part ring buffer is already started.
- VMA_ASSERT(!suballocations2nd.empty());
- break;
- case SECOND_VECTOR_DOUBLE_STACK:
- VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
- break;
- default:
- VMA_ASSERT(0);
- }
-
- suballocations2nd.push_back(newSuballoc);
- }
- break;
- default:
- VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
- }
-
- m_SumFreeSize -= newSuballoc.size;
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation)
+{
+ const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
+
+ switch(request.type)
+ {
+ case VmaAllocationRequestType::UpperAddress:
+ {
+ VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
+ "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+ suballocations2nd.push_back(newSuballoc);
+ m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
+ }
+ break;
+ case VmaAllocationRequestType::EndOf1st:
+ {
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+
+ VMA_ASSERT(suballocations1st.empty() ||
+ request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
+ // Check if it fits before the end of the block.
+ VMA_ASSERT(request.offset + allocSize <= GetSize());
+
+ suballocations1st.push_back(newSuballoc);
+ }
+ break;
+ case VmaAllocationRequestType::EndOf2nd:
+ {
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ // New allocation at the end of the 2-part ring buffer, so it goes before the first allocation from the 1st vector.
+ VMA_ASSERT(!suballocations1st.empty() &&
+ request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+ switch(m_2ndVectorMode)
+ {
+ case SECOND_VECTOR_EMPTY:
+ // First allocation from second part ring buffer.
+ VMA_ASSERT(suballocations2nd.empty());
+ m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
+ break;
+ case SECOND_VECTOR_RING_BUFFER:
+ // 2-part ring buffer is already started.
+ VMA_ASSERT(!suballocations2nd.empty());
+ break;
+ case SECOND_VECTOR_DOUBLE_STACK:
+ VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+
+ suballocations2nd.push_back(newSuballoc);
+ }
+ break;
+ default:
+ VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
+ }
+
+ m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
- FreeAtOffset(allocation->GetOffset());
+ FreeAtOffset(allocation->GetOffset());
}
void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- if(!suballocations1st.empty())
- {
- // First allocation: Mark it as next empty at the beginning.
- VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
- if(firstSuballoc.offset == offset)
- {
- firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
- firstSuballoc.hAllocation = VK_NULL_HANDLE;
- m_SumFreeSize += firstSuballoc.size;
- ++m_1stNullItemsBeginCount;
- CleanupAfterFree();
- return;
- }
- }
-
- // Last allocation in 2-part ring buffer or top of upper stack (same logic).
- if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
- m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
- {
- VmaSuballocation& lastSuballoc = suballocations2nd.back();
- if(lastSuballoc.offset == offset)
- {
- m_SumFreeSize += lastSuballoc.size;
- suballocations2nd.pop_back();
- CleanupAfterFree();
- return;
- }
- }
- // Last allocation in 1st vector.
- else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
- {
- VmaSuballocation& lastSuballoc = suballocations1st.back();
- if(lastSuballoc.offset == offset)
- {
- m_SumFreeSize += lastSuballoc.size;
- suballocations1st.pop_back();
- CleanupAfterFree();
- return;
- }
- }
-
- // Item from the middle of 1st vector.
- {
- VmaSuballocation refSuballoc;
- refSuballoc.offset = offset;
- // Rest of members stays uninitialized intentionally for better performance.
- SuballocationVectorType::iterator it = VmaBinaryFindSorted(
- suballocations1st.begin() + m_1stNullItemsBeginCount,
- suballocations1st.end(),
- refSuballoc,
- VmaSuballocationOffsetLess());
- if(it != suballocations1st.end())
- {
- it->type = VMA_SUBALLOCATION_TYPE_FREE;
- it->hAllocation = VK_NULL_HANDLE;
- ++m_1stNullItemsMiddleCount;
- m_SumFreeSize += it->size;
- CleanupAfterFree();
- return;
- }
- }
-
- if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
- {
- // Item from the middle of 2nd vector.
- VmaSuballocation refSuballoc;
- refSuballoc.offset = offset;
- // Rest of members stays uninitialized intentionally for better performance.
- SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
- VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
- VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
- if(it != suballocations2nd.end())
- {
- it->type = VMA_SUBALLOCATION_TYPE_FREE;
- it->hAllocation = VK_NULL_HANDLE;
- ++m_2ndNullItemsCount;
- m_SumFreeSize += it->size;
- CleanupAfterFree();
- return;
- }
- }
-
- VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+ if(!suballocations1st.empty())
+ {
+ // Freeing the first allocation: mark it as the next empty item at the beginning.
+ VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
+ if(firstSuballoc.offset == offset)
+ {
+ firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
+ firstSuballoc.hAllocation = VK_NULL_HANDLE;
+ m_SumFreeSize += firstSuballoc.size;
+ ++m_1stNullItemsBeginCount;
+ CleanupAfterFree();
+ return;
+ }
+ }
+
+ // Last allocation in 2-part ring buffer or top of upper stack (same logic).
+ if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
+ m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
+ {
+ VmaSuballocation& lastSuballoc = suballocations2nd.back();
+ if(lastSuballoc.offset == offset)
+ {
+ m_SumFreeSize += lastSuballoc.size;
+ suballocations2nd.pop_back();
+ CleanupAfterFree();
+ return;
+ }
+ }
+ // Last allocation in 1st vector.
+ else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
+ {
+ VmaSuballocation& lastSuballoc = suballocations1st.back();
+ if(lastSuballoc.offset == offset)
+ {
+ m_SumFreeSize += lastSuballoc.size;
+ suballocations1st.pop_back();
+ CleanupAfterFree();
+ return;
+ }
+ }
+
+ // Item from the middle of 1st vector.
+ {
+ VmaSuballocation refSuballoc;
+ refSuballoc.offset = offset;
+ // The rest of the members intentionally stay uninitialized for better performance.
+ SuballocationVectorType::iterator it = VmaBinaryFindSorted(
+ suballocations1st.begin() + m_1stNullItemsBeginCount,
+ suballocations1st.end(),
+ refSuballoc,
+ VmaSuballocationOffsetLess());
+ if(it != suballocations1st.end())
+ {
+ it->type = VMA_SUBALLOCATION_TYPE_FREE;
+ it->hAllocation = VK_NULL_HANDLE;
+ ++m_1stNullItemsMiddleCount;
+ m_SumFreeSize += it->size;
+ CleanupAfterFree();
+ return;
+ }
+ }
+
+ if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
+ {
+ // Item from the middle of 2nd vector.
+ VmaSuballocation refSuballoc;
+ refSuballoc.offset = offset;
+ // The rest of the members intentionally stay uninitialized for better performance.
+ SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
+ VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
+ VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
+ if(it != suballocations2nd.end())
+ {
+ it->type = VMA_SUBALLOCATION_TYPE_FREE;
+ it->hAllocation = VK_NULL_HANDLE;
+ ++m_2ndNullItemsCount;
+ m_SumFreeSize += it->size;
+ CleanupAfterFree();
+ return;
+ }
+ }
+
+ VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
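Both middle-of-vector cases above rely on the suballocation vectors being sorted by offset (ascending in the 1st vector and in ring-buffer mode, descending in the 2nd vector when it is used as an upper stack), which is what makes the binary search valid. Below is a minimal standalone sketch of that lookup using std::lower_bound over a plain struct instead of VmaBinaryFindSorted and VmaSuballocation; every name and value in it is illustrative, not the library's API.

#include <algorithm>
#include <cstdio>
#include <vector>

struct Sub { unsigned long long offset; bool isFree; };

int main()
{
    // 1st vector: sorted by ascending offset, as the linear allocator keeps it.
    std::vector<Sub> subs = { {0, false}, {64, false}, {192, false}, {448, false} };
    const unsigned long long target = 192; // offset passed to the free call
    auto it = std::lower_bound(subs.begin(), subs.end(), target,
        [](const Sub& s, unsigned long long off) { return s.offset < off; });
    if(it != subs.end() && it->offset == target)
    {
        it->isFree = true; // plays the role of type = VMA_SUBALLOCATION_TYPE_FREE
        std::printf("freed suballocation at offset %llu\n", it->offset);
    }
}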
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
- const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
- const size_t suballocCount = AccessSuballocations1st().size();
- return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
+ const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+ const size_t suballocCount = AccessSuballocations1st().size();
+ return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
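Rewriting the inequality: with alive = suballocCount - nullItemCount, the condition nullItemCount * 2 >= alive * 3 means null/alive >= 1.5, i.e. compaction kicks in once null items make up at least 60% of a vector that is longer than 32 entries. A self-contained sketch of the threshold (function and values are illustrative only):

#include <cstddef>
#include <cstdio>

static bool ShouldCompact(size_t nullItemCount, size_t suballocCount)
{
    // Same heuristic as ShouldCompact1st() above.
    return suballocCount > 32 &&
        nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}

int main()
{
    std::printf("%d\n", ShouldCompact(59, 100)); // 0: 59% null, just below the threshold
    std::printf("%d\n", ShouldCompact(60, 100)); // 1: exactly 60% null
    std::printf("%d\n", ShouldCompact(20, 32));  // 0: vector too short to bother
}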
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
- SuballocationVectorType& suballocations1st = AccessSuballocations1st();
- SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
-
- if(IsEmpty())
- {
- suballocations1st.clear();
- suballocations2nd.clear();
- m_1stNullItemsBeginCount = 0;
- m_1stNullItemsMiddleCount = 0;
- m_2ndNullItemsCount = 0;
- m_2ndVectorMode = SECOND_VECTOR_EMPTY;
- }
- else
- {
- const size_t suballoc1stCount = suballocations1st.size();
- const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
- VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
-
- // Find more null items at the beginning of 1st vector.
- while(m_1stNullItemsBeginCount < suballoc1stCount &&
- suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
- {
- ++m_1stNullItemsBeginCount;
- --m_1stNullItemsMiddleCount;
- }
-
- // Find more null items at the end of 1st vector.
- while(m_1stNullItemsMiddleCount > 0 &&
- suballocations1st.back().hAllocation == VK_NULL_HANDLE)
- {
- --m_1stNullItemsMiddleCount;
- suballocations1st.pop_back();
- }
-
- // Find more null items at the end of 2nd vector.
- while(m_2ndNullItemsCount > 0 &&
- suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
- {
- --m_2ndNullItemsCount;
- suballocations2nd.pop_back();
- }
-
- // Find more null items at the beginning of 2nd vector.
- while(m_2ndNullItemsCount > 0 &&
- suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
- {
- --m_2ndNullItemsCount;
- VmaVectorRemove(suballocations2nd, 0);
- }
-
- if(ShouldCompact1st())
- {
- const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
- size_t srcIndex = m_1stNullItemsBeginCount;
- for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
- {
- while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
- {
- ++srcIndex;
- }
- if(dstIndex != srcIndex)
- {
- suballocations1st[dstIndex] = suballocations1st[srcIndex];
- }
- ++srcIndex;
- }
- suballocations1st.resize(nonNullItemCount);
- m_1stNullItemsBeginCount = 0;
- m_1stNullItemsMiddleCount = 0;
- }
-
- // 2nd vector became empty.
- if(suballocations2nd.empty())
- {
- m_2ndVectorMode = SECOND_VECTOR_EMPTY;
- }
-
- // 1st vector became empty.
- if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
- {
- suballocations1st.clear();
- m_1stNullItemsBeginCount = 0;
-
- if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
- {
- // Swap 1st with 2nd. Now 2nd is empty.
- m_2ndVectorMode = SECOND_VECTOR_EMPTY;
- m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
- while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
- suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
- {
- ++m_1stNullItemsBeginCount;
- --m_1stNullItemsMiddleCount;
- }
- m_2ndNullItemsCount = 0;
- m_1stVectorIndex ^= 1;
- }
- }
- }
-
- VMA_HEAVY_ASSERT(Validate());
+ SuballocationVectorType& suballocations1st = AccessSuballocations1st();
+ SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
+
+ if(IsEmpty())
+ {
+ suballocations1st.clear();
+ suballocations2nd.clear();
+ m_1stNullItemsBeginCount = 0;
+ m_1stNullItemsMiddleCount = 0;
+ m_2ndNullItemsCount = 0;
+ m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+ }
+ else
+ {
+ const size_t suballoc1stCount = suballocations1st.size();
+ const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
+ VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
+
+ // Find more null items at the beginning of 1st vector.
+ while(m_1stNullItemsBeginCount < suballoc1stCount &&
+ suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
+ {
+ ++m_1stNullItemsBeginCount;
+ --m_1stNullItemsMiddleCount;
+ }
+
+ // Find more null items at the end of 1st vector.
+ while(m_1stNullItemsMiddleCount > 0 &&
+ suballocations1st.back().hAllocation == VK_NULL_HANDLE)
+ {
+ --m_1stNullItemsMiddleCount;
+ suballocations1st.pop_back();
+ }
+
+ // Find more null items at the end of 2nd vector.
+ while(m_2ndNullItemsCount > 0 &&
+ suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
+ {
+ --m_2ndNullItemsCount;
+ suballocations2nd.pop_back();
+ }
+
+ // Find more null items at the beginning of 2nd vector.
+ while(m_2ndNullItemsCount > 0 &&
+ suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
+ {
+ --m_2ndNullItemsCount;
+ VmaVectorRemove(suballocations2nd, 0);
+ }
+
+ if(ShouldCompact1st())
+ {
+ const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
+ size_t srcIndex = m_1stNullItemsBeginCount;
+ for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
+ {
+ while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
+ {
+ ++srcIndex;
+ }
+ if(dstIndex != srcIndex)
+ {
+ suballocations1st[dstIndex] = suballocations1st[srcIndex];
+ }
+ ++srcIndex;
+ }
+ suballocations1st.resize(nonNullItemCount);
+ m_1stNullItemsBeginCount = 0;
+ m_1stNullItemsMiddleCount = 0;
+ }
+
+ // 2nd vector became empty.
+ if(suballocations2nd.empty())
+ {
+ m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+ }
+
+ // 1st vector became empty.
+ if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
+ {
+ suballocations1st.clear();
+ m_1stNullItemsBeginCount = 0;
+
+ if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
+ {
+ // Swap 1st with 2nd. Now 2nd is empty.
+ m_2ndVectorMode = SECOND_VECTOR_EMPTY;
+ m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
+ while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
+ suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
+ {
+ ++m_1stNullItemsBeginCount;
+ --m_1stNullItemsMiddleCount;
+ }
+ m_2ndNullItemsCount = 0;
+ m_1stVectorIndex ^= 1;
+ }
+ }
+ }
+
+ VMA_HEAVY_ASSERT(Validate());
}
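The compaction branch above is a stable single-pass "shift live items over the holes" loop. A standalone illustration with a plain std::vector, where 0 stands in for VK_NULL_HANDLE and all counts are made up for the sketch:

#include <cstdio>
#include <vector>

int main()
{
    // 0 plays the role of VK_NULL_HANDLE (a freed slot).
    std::vector<int> items = { 0, 0, 7, 0, 9, 0, 0, 4 };
    const size_t beginNulls = 2;              // m_1stNullItemsBeginCount
    const size_t liveCount = 3;               // non-null items: 7, 9, 4
    size_t src = beginNulls;
    for(size_t dst = 0; dst < liveCount; ++dst)
    {
        while(items[src] == 0) ++src;         // skip freed slots
        if(dst != src) items[dst] = items[src];
        ++src;
    }
    items.resize(liveCount);
    for(int v : items) std::printf("%d ", v); // prints: 7 9 4
}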
@@ -11057,565 +10895,565 @@ void VmaBlockMetadata_Linear::CleanupAfterFree()
// class VmaBlockMetadata_Buddy
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
- VmaBlockMetadata(hAllocator),
- m_Root(VMA_NULL),
- m_AllocationCount(0),
- m_FreeCount(1),
- m_SumFreeSize(0)
+ VmaBlockMetadata(hAllocator),
+ m_Root(VMA_NULL),
+ m_AllocationCount(0),
+ m_FreeCount(1),
+ m_SumFreeSize(0)
{
- memset(m_FreeList, 0, sizeof(m_FreeList));
+ memset(m_FreeList, 0, sizeof(m_FreeList));
}
VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
- DeleteNode(m_Root);
+ DeleteNode(m_Root);
}
void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
- VmaBlockMetadata::Init(size);
+ VmaBlockMetadata::Init(size);
- m_UsableSize = VmaPrevPow2(size);
- m_SumFreeSize = m_UsableSize;
+ m_UsableSize = VmaPrevPow2(size);
+ m_SumFreeSize = m_UsableSize;
- // Calculate m_LevelCount.
- m_LevelCount = 1;
- while(m_LevelCount < MAX_LEVELS &&
- LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
- {
- ++m_LevelCount;
- }
+ // Calculate m_LevelCount.
+ m_LevelCount = 1;
+ while(m_LevelCount < MAX_LEVELS &&
+ LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
+ {
+ ++m_LevelCount;
+ }
- Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
- rootNode->offset = 0;
- rootNode->type = Node::TYPE_FREE;
- rootNode->parent = VMA_NULL;
- rootNode->buddy = VMA_NULL;
+ Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
+ rootNode->offset = 0;
+ rootNode->type = Node::TYPE_FREE;
+ rootNode->parent = VMA_NULL;
+ rootNode->buddy = VMA_NULL;
- m_Root = rootNode;
- AddToFreeListFront(0, rootNode);
+ m_Root = rootNode;
+ AddToFreeListFront(0, rootNode);
}
bool VmaBlockMetadata_Buddy::Validate() const
{
- // Validate tree.
- ValidationContext ctx;
- if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
- {
- VMA_VALIDATE(false && "ValidateNode failed.");
- }
- VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
- VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
-
- // Validate free node lists.
- for(uint32_t level = 0; level < m_LevelCount; ++level)
- {
- VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
- m_FreeList[level].front->free.prev == VMA_NULL);
-
- for(Node* node = m_FreeList[level].front;
- node != VMA_NULL;
- node = node->free.next)
- {
- VMA_VALIDATE(node->type == Node::TYPE_FREE);
-
- if(node->free.next == VMA_NULL)
- {
- VMA_VALIDATE(m_FreeList[level].back == node);
- }
- else
- {
- VMA_VALIDATE(node->free.next->free.prev == node);
- }
- }
- }
-
- // Validate that free lists ar higher levels are empty.
- for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
- {
- VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
- }
-
- return true;
+ // Validate tree.
+ ValidationContext ctx;
+ if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
+ {
+ VMA_VALIDATE(false && "ValidateNode failed.");
+ }
+ VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
+ VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
+
+ // Validate free node lists.
+ for(uint32_t level = 0; level < m_LevelCount; ++level)
+ {
+ VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
+ m_FreeList[level].front->free.prev == VMA_NULL);
+
+ for(Node* node = m_FreeList[level].front;
+ node != VMA_NULL;
+ node = node->free.next)
+ {
+ VMA_VALIDATE(node->type == Node::TYPE_FREE);
+
+ if(node->free.next == VMA_NULL)
+ {
+ VMA_VALIDATE(m_FreeList[level].back == node);
+ }
+ else
+ {
+ VMA_VALIDATE(node->free.next->free.prev == node);
+ }
+ }
+ }
+
+ // Validate that free lists at higher levels are empty.
+ for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
+ {
+ VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
+ }
+
+ return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
- for(uint32_t level = 0; level < m_LevelCount; ++level)
- {
- if(m_FreeList[level].front != VMA_NULL)
- {
- return LevelToNodeSize(level);
- }
- }
- return 0;
+ for(uint32_t level = 0; level < m_LevelCount; ++level)
+ {
+ if(m_FreeList[level].front != VMA_NULL)
+ {
+ return LevelToNodeSize(level);
+ }
+ }
+ return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
- const VkDeviceSize unusableSize = GetUnusableSize();
+ const VkDeviceSize unusableSize = GetUnusableSize();
- outInfo.blockCount = 1;
+ outInfo.blockCount = 1;
- outInfo.allocationCount = outInfo.unusedRangeCount = 0;
- outInfo.usedBytes = outInfo.unusedBytes = 0;
+ outInfo.allocationCount = outInfo.unusedRangeCount = 0;
+ outInfo.usedBytes = outInfo.unusedBytes = 0;
- outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
- outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;
- outInfo.allocationSizeAvg = outInfo.unusedRangeSizeAvg = 0; // Unused.
+ outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
+ outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;
+ outInfo.allocationSizeAvg = outInfo.unusedRangeSizeAvg = 0; // Unused.
- CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
+ CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
- if(unusableSize > 0)
- {
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusableSize;
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
- outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
- }
+ if(unusableSize > 0)
+ {
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += unusableSize;
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
+ outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
+ }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
- const VkDeviceSize unusableSize = GetUnusableSize();
+ const VkDeviceSize unusableSize = GetUnusableSize();
- inoutStats.size += GetSize();
- inoutStats.unusedSize += m_SumFreeSize + unusableSize;
- inoutStats.allocationCount += m_AllocationCount;
- inoutStats.unusedRangeCount += m_FreeCount;
- inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
+ inoutStats.size += GetSize();
+ inoutStats.unusedSize += m_SumFreeSize + unusableSize;
+ inoutStats.allocationCount += m_AllocationCount;
+ inoutStats.unusedRangeCount += m_FreeCount;
+ inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
- if(unusableSize > 0)
- {
- ++inoutStats.unusedRangeCount;
- // Not updating inoutStats.unusedRangeSizeMax with unusableSize because this space is not available for allocations.
- }
+ if(unusableSize > 0)
+ {
+ ++inoutStats.unusedRangeCount;
+ // Not updating inoutStats.unusedRangeSizeMax with unusableSize because this space is not available for allocations.
+ }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
- // TODO optimize
- VmaStatInfo stat;
- CalcAllocationStatInfo(stat);
+ // TODO optimize
+ VmaStatInfo stat;
+ CalcAllocationStatInfo(stat);
- PrintDetailedMap_Begin(
- json,
- stat.unusedBytes,
- stat.allocationCount,
- stat.unusedRangeCount);
+ PrintDetailedMap_Begin(
+ json,
+ stat.unusedBytes,
+ stat.allocationCount,
+ stat.unusedRangeCount);
- PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
+ PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
- const VkDeviceSize unusableSize = GetUnusableSize();
- if(unusableSize > 0)
- {
- PrintDetailedMap_UnusedRange(json,
- m_UsableSize, // offset
- unusableSize); // size
- }
+ const VkDeviceSize unusableSize = GetUnusableSize();
+ if(unusableSize > 0)
+ {
+ PrintDetailedMap_UnusedRange(json,
+ m_UsableSize, // offset
+ unusableSize); // size
+ }
- PrintDetailedMap_End(json);
+ PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VkDeviceSize bufferImageGranularity,
- VkDeviceSize allocSize,
- VkDeviceSize allocAlignment,
- bool upperAddress,
- VmaSuballocationType allocType,
- bool canMakeOtherLost,
- uint32_t strategy,
- VmaAllocationRequest* pAllocationRequest)
-{
- VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
-
- // Simple way to respect bufferImageGranularity. May be optimized some day.
- // Whenever it might be an OPTIMAL image...
- if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
- allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
- allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
- {
- allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
- allocSize = VMA_MAX(allocSize, bufferImageGranularity);
- }
-
- if(allocSize > m_UsableSize)
- {
- return false;
- }
-
- const uint32_t targetLevel = AllocSizeToLevel(allocSize);
- for(uint32_t level = targetLevel + 1; level--; )
- {
- for(Node* freeNode = m_FreeList[level].front;
- freeNode != VMA_NULL;
- freeNode = freeNode->free.next)
- {
- if(freeNode->offset % allocAlignment == 0)
- {
- pAllocationRequest->type = VmaAllocationRequestType::Normal;
- pAllocationRequest->offset = freeNode->offset;
- pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
- pAllocationRequest->sumItemSize = 0;
- pAllocationRequest->itemsToMakeLostCount = 0;
- pAllocationRequest->customData = (void*)(uintptr_t)level;
- return true;
- }
- }
- }
-
- return false;
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VkDeviceSize bufferImageGranularity,
+ VkDeviceSize allocSize,
+ VkDeviceSize allocAlignment,
+ bool upperAddress,
+ VmaSuballocationType allocType,
+ bool canMakeOtherLost,
+ uint32_t strategy,
+ VmaAllocationRequest* pAllocationRequest)
+{
+ VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
+
+ // Simple way to respect bufferImageGranularity. May be optimized some day.
+ // Whenever the allocation might be an OPTIMAL image, raise its alignment and size to at least the granularity.
+ if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
+ allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
+ allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
+ {
+ allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
+ allocSize = VMA_MAX(allocSize, bufferImageGranularity);
+ }
+
+ if(allocSize > m_UsableSize)
+ {
+ return false;
+ }
+
+ const uint32_t targetLevel = AllocSizeToLevel(allocSize);
+ for(uint32_t level = targetLevel + 1; level--; )
+ {
+ for(Node* freeNode = m_FreeList[level].front;
+ freeNode != VMA_NULL;
+ freeNode = freeNode->free.next)
+ {
+ if(freeNode->offset % allocAlignment == 0)
+ {
+ pAllocationRequest->type = VmaAllocationRequestType::Normal;
+ pAllocationRequest->offset = freeNode->offset;
+ pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
+ pAllocationRequest->sumItemSize = 0;
+ pAllocationRequest->itemsToMakeLostCount = 0;
+ pAllocationRequest->customData = (void*)(uintptr_t)level;
+ return true;
+ }
+ }
+ }
+
+ return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
- uint32_t currentFrameIndex,
- uint32_t frameInUseCount,
- VmaAllocationRequest* pAllocationRequest)
+ uint32_t currentFrameIndex,
+ uint32_t frameInUseCount,
+ VmaAllocationRequest* pAllocationRequest)
{
- /*
- Lost allocations are not supported in buddy allocator at the moment.
- Support might be added in the future.
- */
- return pAllocationRequest->itemsToMakeLostCount == 0;
+ /*
+ Lost allocations are not supported in buddy allocator at the moment.
+ Support might be added in the future.
+ */
+ return pAllocationRequest->itemsToMakeLostCount == 0;
}
uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
- /*
- Lost allocations are not supported in buddy allocator at the moment.
- Support might be added in the future.
- */
- return 0;
+ /*
+ Lost allocations are not supported in buddy allocator at the moment.
+ Support might be added in the future.
+ */
+ return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
- const VmaAllocationRequest& request,
- VmaSuballocationType type,
- VkDeviceSize allocSize,
- VmaAllocation hAllocation)
-{
- VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
-
- const uint32_t targetLevel = AllocSizeToLevel(allocSize);
- uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
-
- Node* currNode = m_FreeList[currLevel].front;
- VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
- while(currNode->offset != request.offset)
- {
- currNode = currNode->free.next;
- VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
- }
-
- // Go down, splitting free nodes.
- while(currLevel < targetLevel)
- {
- // currNode is already first free node at currLevel.
- // Remove it from list of free nodes at this currLevel.
- RemoveFromFreeList(currLevel, currNode);
-
- const uint32_t childrenLevel = currLevel + 1;
-
- // Create two free sub-nodes.
- Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
- Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
-
- leftChild->offset = currNode->offset;
- leftChild->type = Node::TYPE_FREE;
- leftChild->parent = currNode;
- leftChild->buddy = rightChild;
-
- rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
- rightChild->type = Node::TYPE_FREE;
- rightChild->parent = currNode;
- rightChild->buddy = leftChild;
-
- // Convert current currNode to split type.
- currNode->type = Node::TYPE_SPLIT;
- currNode->split.leftChild = leftChild;
-
- // Add child nodes to free list. Order is important!
- AddToFreeListFront(childrenLevel, rightChild);
- AddToFreeListFront(childrenLevel, leftChild);
-
- ++m_FreeCount;
- //m_SumFreeSize -= LevelToNodeSize(currLevel) % 2; // Useful only when level node sizes can be non power of 2.
- ++currLevel;
- currNode = m_FreeList[currLevel].front;
-
- /*
- We can be sure that currNode, as left child of node previously split,
- also fullfills the alignment requirement.
- */
- }
-
- // Remove from free list.
- VMA_ASSERT(currLevel == targetLevel &&
- currNode != VMA_NULL &&
- currNode->type == Node::TYPE_FREE);
- RemoveFromFreeList(currLevel, currNode);
-
- // Convert to allocation node.
- currNode->type = Node::TYPE_ALLOCATION;
- currNode->allocation.alloc = hAllocation;
-
- ++m_AllocationCount;
- --m_FreeCount;
- m_SumFreeSize -= allocSize;
+ const VmaAllocationRequest& request,
+ VmaSuballocationType type,
+ VkDeviceSize allocSize,
+ VmaAllocation hAllocation)
+{
+ VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
+
+ const uint32_t targetLevel = AllocSizeToLevel(allocSize);
+ uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
+
+ Node* currNode = m_FreeList[currLevel].front;
+ VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
+ while(currNode->offset != request.offset)
+ {
+ currNode = currNode->free.next;
+ VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
+ }
+
+ // Go down, splitting free nodes.
+ while(currLevel < targetLevel)
+ {
+ // currNode is already the first free node at currLevel.
+ // Remove it from the list of free nodes at this level.
+ RemoveFromFreeList(currLevel, currNode);
+
+ const uint32_t childrenLevel = currLevel + 1;
+
+ // Create two free sub-nodes.
+ Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
+ Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
+
+ leftChild->offset = currNode->offset;
+ leftChild->type = Node::TYPE_FREE;
+ leftChild->parent = currNode;
+ leftChild->buddy = rightChild;
+
+ rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
+ rightChild->type = Node::TYPE_FREE;
+ rightChild->parent = currNode;
+ rightChild->buddy = leftChild;
+
+ // Convert current currNode to split type.
+ currNode->type = Node::TYPE_SPLIT;
+ currNode->split.leftChild = leftChild;
+
+ // Add child nodes to free list. Order is important!
+ AddToFreeListFront(childrenLevel, rightChild);
+ AddToFreeListFront(childrenLevel, leftChild);
+
+ ++m_FreeCount;
+ //m_SumFreeSize -= LevelToNodeSize(currLevel) % 2; // Useful only when level node sizes can be non power of 2.
+ ++currLevel;
+ currNode = m_FreeList[currLevel].front;
+
+ /*
+ We can be sure that currNode, as the left child of a previously split node,
+ also fulfills the alignment requirement.
+ */
+ }
+
+ // Remove from free list.
+ VMA_ASSERT(currLevel == targetLevel &&
+ currNode != VMA_NULL &&
+ currNode->type == Node::TYPE_FREE);
+ RemoveFromFreeList(currLevel, currNode);
+
+ // Convert to allocation node.
+ currNode->type = Node::TYPE_ALLOCATION;
+ currNode->allocation.alloc = hAllocation;
+
+ ++m_AllocationCount;
+ --m_FreeCount;
+ m_SumFreeSize -= allocSize;
}
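A worked example of the splitting loop, hedged as a sketch with made-up sizes: allocating a 16-unit node out of a 64-unit free node splits twice; the left child always keeps the parent's offset (hence the alignment guarantee noted in the comment), while each right-hand buddy goes back on the free list.

#include <cstdint>
#include <cstdio>

int main()
{
    uint64_t nodeSize = 64, offset = 0;       // free node found at some level
    const uint64_t allocSize = 16;            // target node size for the request
    while(nodeSize / 2 >= allocSize)
    {
        const uint64_t childSize = nodeSize / 2;
        // Left child keeps the parent's offset; the right child (its buddy)
        // starts childSize units later and stays on the free list.
        std::printf("split: left@%llu right@%llu size=%llu\n",
            (unsigned long long)offset,
            (unsigned long long)(offset + childSize),
            (unsigned long long)childSize);
        nodeSize = childSize;                 // descend into the left child
    }
    std::printf("allocate %llu units at offset %llu\n",
        (unsigned long long)nodeSize, (unsigned long long)offset);
}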
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
- if(node->type == Node::TYPE_SPLIT)
- {
- DeleteNode(node->split.leftChild->buddy);
- DeleteNode(node->split.leftChild);
- }
+ if(node->type == Node::TYPE_SPLIT)
+ {
+ DeleteNode(node->split.leftChild->buddy);
+ DeleteNode(node->split.leftChild);
+ }
- vma_delete(GetAllocationCallbacks(), node);
+ vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
- VMA_VALIDATE(level < m_LevelCount);
- VMA_VALIDATE(curr->parent == parent);
- VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
- VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
- switch(curr->type)
- {
- case Node::TYPE_FREE:
- // curr->free.prev, next are validated separately.
- ctx.calculatedSumFreeSize += levelNodeSize;
- ++ctx.calculatedFreeCount;
- break;
- case Node::TYPE_ALLOCATION:
- ++ctx.calculatedAllocationCount;
- ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
- VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
- break;
- case Node::TYPE_SPLIT:
- {
- const uint32_t childrenLevel = level + 1;
- const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
- const Node* const leftChild = curr->split.leftChild;
- VMA_VALIDATE(leftChild != VMA_NULL);
- VMA_VALIDATE(leftChild->offset == curr->offset);
- if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
- {
- VMA_VALIDATE(false && "ValidateNode for left child failed.");
- }
- const Node* const rightChild = leftChild->buddy;
- VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
- if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
- {
- VMA_VALIDATE(false && "ValidateNode for right child failed.");
- }
- }
- break;
- default:
- return false;
- }
-
- return true;
+ VMA_VALIDATE(level < m_LevelCount);
+ VMA_VALIDATE(curr->parent == parent);
+ VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
+ VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
+ switch(curr->type)
+ {
+ case Node::TYPE_FREE:
+ // curr->free.prev, next are validated separately.
+ ctx.calculatedSumFreeSize += levelNodeSize;
+ ++ctx.calculatedFreeCount;
+ break;
+ case Node::TYPE_ALLOCATION:
+ ++ctx.calculatedAllocationCount;
+ ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
+ VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
+ break;
+ case Node::TYPE_SPLIT:
+ {
+ const uint32_t childrenLevel = level + 1;
+ const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
+ const Node* const leftChild = curr->split.leftChild;
+ VMA_VALIDATE(leftChild != VMA_NULL);
+ VMA_VALIDATE(leftChild->offset == curr->offset);
+ if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
+ {
+ VMA_VALIDATE(false && "ValidateNode for left child failed.");
+ }
+ const Node* const rightChild = leftChild->buddy;
+ VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
+ if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
+ {
+ VMA_VALIDATE(false && "ValidateNode for right child failed.");
+ }
+ }
+ break;
+ default:
+ return false;
+ }
+
+ return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
- // I know this could be optimized somehow e.g. by using std::log2p1 from C++20.
- uint32_t level = 0;
- VkDeviceSize currLevelNodeSize = m_UsableSize;
- VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
- while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
- {
- ++level;
- currLevelNodeSize = nextLevelNodeSize;
- nextLevelNodeSize = currLevelNodeSize >> 1;
- }
- return level;
+ // I know this could be optimized somehow, e.g. by using std::log2p1 from C++20.
+ uint32_t level = 0;
+ VkDeviceSize currLevelNodeSize = m_UsableSize;
+ VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
+ while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
+ {
+ ++level;
+ currLevelNodeSize = nextLevelNodeSize;
+ nextLevelNodeSize = currLevelNodeSize >> 1;
+ }
+ return level;
}
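The loop maps an allocation size to the deepest level whose node size still holds it; level n has node size m_UsableSize >> n. A standalone sketch with concrete numbers (the 256 MiB block size and the level count are assumptions chosen for illustration, and usableSize is taken to be a power of two, as m_UsableSize = VmaPrevPow2(size) guarantees above):

#include <cstdint>
#include <cstdio>

static uint32_t AllocSizeToLevel(uint64_t allocSize, uint64_t usableSize, uint32_t levelCount)
{
    uint32_t level = 0;
    uint64_t next = usableSize >> 1;          // node size one level down
    while(allocSize <= next && level + 1 < levelCount)
    {
        ++level;
        next >>= 1;
    }
    return level;
}

int main()
{
    // 256 MiB block: level 0 = 256 MiB, level 1 = 128 MiB, ..., level 5 = 8 MiB.
    const uint64_t MiB = 1ull << 20;
    std::printf("%u\n", AllocSizeToLevel(5 * MiB, 256 * MiB, 16)); // 5 (8 MiB node)
    std::printf("%u\n", AllocSizeToLevel(8 * MiB, 256 * MiB, 16)); // 5 (exact fit)
    std::printf("%u\n", AllocSizeToLevel(9 * MiB, 256 * MiB, 16)); // 4 (16 MiB node)
}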
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
- // Find node and level.
- Node* node = m_Root;
- VkDeviceSize nodeOffset = 0;
- uint32_t level = 0;
- VkDeviceSize levelNodeSize = LevelToNodeSize(0);
- while(node->type == Node::TYPE_SPLIT)
- {
- const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
- if(offset < nodeOffset + nextLevelSize)
- {
- node = node->split.leftChild;
- }
- else
- {
- node = node->split.leftChild->buddy;
- nodeOffset += nextLevelSize;
- }
- ++level;
- levelNodeSize = nextLevelSize;
- }
-
- VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
- VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
-
- ++m_FreeCount;
- --m_AllocationCount;
- m_SumFreeSize += alloc->GetSize();
-
- node->type = Node::TYPE_FREE;
-
- // Join free nodes if possible.
- while(level > 0 && node->buddy->type == Node::TYPE_FREE)
- {
- RemoveFromFreeList(level, node->buddy);
- Node* const parent = node->parent;
-
- vma_delete(GetAllocationCallbacks(), node->buddy);
- vma_delete(GetAllocationCallbacks(), node);
- parent->type = Node::TYPE_FREE;
-
- node = parent;
- --level;
- //m_SumFreeSize += LevelToNodeSize(level) % 2; // Useful only when level node sizes can be non power of 2.
- --m_FreeCount;
- }
-
- AddToFreeListFront(level, node);
+ // Find node and level.
+ Node* node = m_Root;
+ VkDeviceSize nodeOffset = 0;
+ uint32_t level = 0;
+ VkDeviceSize levelNodeSize = LevelToNodeSize(0);
+ while(node->type == Node::TYPE_SPLIT)
+ {
+ const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
+ if(offset < nodeOffset + nextLevelSize)
+ {
+ node = node->split.leftChild;
+ }
+ else
+ {
+ node = node->split.leftChild->buddy;
+ nodeOffset += nextLevelSize;
+ }
+ ++level;
+ levelNodeSize = nextLevelSize;
+ }
+
+ VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
+ VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
+
+ ++m_FreeCount;
+ --m_AllocationCount;
+ m_SumFreeSize += alloc->GetSize();
+
+ node->type = Node::TYPE_FREE;
+
+ // Join free nodes if possible.
+ while(level > 0 && node->buddy->type == Node::TYPE_FREE)
+ {
+ RemoveFromFreeList(level, node->buddy);
+ Node* const parent = node->parent;
+
+ vma_delete(GetAllocationCallbacks(), node->buddy);
+ vma_delete(GetAllocationCallbacks(), node);
+ parent->type = Node::TYPE_FREE;
+
+ node = parent;
+ --level;
+ //m_SumFreeSize += LevelToNodeSize(level) % 2; // Useful only when level node sizes can be non power of 2.
+ --m_FreeCount;
+ }
+
+ AddToFreeListFront(level, node);
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
- switch(node->type)
- {
- case Node::TYPE_FREE:
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += levelNodeSize;
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
- outInfo.unusedRangeSizeMin = VMA_MAX(outInfo.unusedRangeSizeMin, levelNodeSize);
- break;
- case Node::TYPE_ALLOCATION:
- {
- const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
- ++outInfo.allocationCount;
- outInfo.usedBytes += allocSize;
- outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
- outInfo.allocationSizeMin = VMA_MAX(outInfo.allocationSizeMin, allocSize);
-
- const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
- if(unusedRangeSize > 0)
- {
- ++outInfo.unusedRangeCount;
- outInfo.unusedBytes += unusedRangeSize;
- outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
- outInfo.unusedRangeSizeMin = VMA_MAX(outInfo.unusedRangeSizeMin, unusedRangeSize);
- }
- }
- break;
- case Node::TYPE_SPLIT:
- {
- const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
- const Node* const leftChild = node->split.leftChild;
- CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
- const Node* const rightChild = leftChild->buddy;
- CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
- }
- break;
- default:
- VMA_ASSERT(0);
- }
+ switch(node->type)
+ {
+ case Node::TYPE_FREE:
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += levelNodeSize;
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
+ outInfo.unusedRangeSizeMin = VMA_MAX(outInfo.unusedRangeSizeMin, levelNodeSize);
+ break;
+ case Node::TYPE_ALLOCATION:
+ {
+ const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
+ ++outInfo.allocationCount;
+ outInfo.usedBytes += allocSize;
+ outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
+ outInfo.allocationSizeMin = VMA_MAX(outInfo.allocationSizeMin, allocSize);
+
+ const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
+ if(unusedRangeSize > 0)
+ {
+ ++outInfo.unusedRangeCount;
+ outInfo.unusedBytes += unusedRangeSize;
+ outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
+ outInfo.unusedRangeSizeMin = VMA_MAX(outInfo.unusedRangeSizeMin, unusedRangeSize);
+ }
+ }
+ break;
+ case Node::TYPE_SPLIT:
+ {
+ const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
+ const Node* const leftChild = node->split.leftChild;
+ CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
+ const Node* const rightChild = leftChild->buddy;
+ CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
+ }
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
- VMA_ASSERT(node->type == Node::TYPE_FREE);
-
- // List is empty.
- Node* const frontNode = m_FreeList[level].front;
- if(frontNode == VMA_NULL)
- {
- VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
- node->free.prev = node->free.next = VMA_NULL;
- m_FreeList[level].front = m_FreeList[level].back = node;
- }
- else
- {
- VMA_ASSERT(frontNode->free.prev == VMA_NULL);
- node->free.prev = VMA_NULL;
- node->free.next = frontNode;
- frontNode->free.prev = node;
- m_FreeList[level].front = node;
- }
+ VMA_ASSERT(node->type == Node::TYPE_FREE);
+
+ // List is empty.
+ Node* const frontNode = m_FreeList[level].front;
+ if(frontNode == VMA_NULL)
+ {
+ VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
+ node->free.prev = node->free.next = VMA_NULL;
+ m_FreeList[level].front = m_FreeList[level].back = node;
+ }
+ else
+ {
+ VMA_ASSERT(frontNode->free.prev == VMA_NULL);
+ node->free.prev = VMA_NULL;
+ node->free.next = frontNode;
+ frontNode->free.prev = node;
+ m_FreeList[level].front = node;
+ }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
- VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
-
- // It is at the front.
- if(node->free.prev == VMA_NULL)
- {
- VMA_ASSERT(m_FreeList[level].front == node);
- m_FreeList[level].front = node->free.next;
- }
- else
- {
- Node* const prevFreeNode = node->free.prev;
- VMA_ASSERT(prevFreeNode->free.next == node);
- prevFreeNode->free.next = node->free.next;
- }
-
- // It is at the back.
- if(node->free.next == VMA_NULL)
- {
- VMA_ASSERT(m_FreeList[level].back == node);
- m_FreeList[level].back = node->free.prev;
- }
- else
- {
- Node* const nextFreeNode = node->free.next;
- VMA_ASSERT(nextFreeNode->free.prev == node);
- nextFreeNode->free.prev = node->free.prev;
- }
+ VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
+
+ // It is at the front.
+ if(node->free.prev == VMA_NULL)
+ {
+ VMA_ASSERT(m_FreeList[level].front == node);
+ m_FreeList[level].front = node->free.next;
+ }
+ else
+ {
+ Node* const prevFreeNode = node->free.prev;
+ VMA_ASSERT(prevFreeNode->free.next == node);
+ prevFreeNode->free.next = node->free.next;
+ }
+
+ // It is at the back.
+ if(node->free.next == VMA_NULL)
+ {
+ VMA_ASSERT(m_FreeList[level].back == node);
+ m_FreeList[level].back = node->free.prev;
+ }
+ else
+ {
+ Node* const nextFreeNode = node->free.next;
+ VMA_ASSERT(nextFreeNode->free.prev == node);
+ nextFreeNode->free.prev = node->free.prev;
+ }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
- switch(node->type)
- {
- case Node::TYPE_FREE:
- PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
- break;
- case Node::TYPE_ALLOCATION:
- {
- PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
- const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
- if(allocSize < levelNodeSize)
- {
- PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
- }
- }
- break;
- case Node::TYPE_SPLIT:
- {
- const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
- const Node* const leftChild = node->split.leftChild;
- PrintDetailedMapNode(json, leftChild, childrenNodeSize);
- const Node* const rightChild = leftChild->buddy;
- PrintDetailedMapNode(json, rightChild, childrenNodeSize);
- }
- break;
- default:
- VMA_ASSERT(0);
- }
+ switch(node->type)
+ {
+ case Node::TYPE_FREE:
+ PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
+ break;
+ case Node::TYPE_ALLOCATION:
+ {
+ PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
+ const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
+ if(allocSize < levelNodeSize)
+ {
+ PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
+ }
+ }
+ break;
+ case Node::TYPE_SPLIT:
+ {
+ const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
+ const Node* const leftChild = node->split.leftChild;
+ PrintDetailedMapNode(json, leftChild, childrenNodeSize);
+ const Node* const rightChild = leftChild->buddy;
+ PrintDetailedMapNode(json, rightChild, childrenNodeSize);
+ }
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
}
#endif // #if VMA_STATS_STRING_ENABLED
@@ -11624,274 +11462,274 @@ void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, con
// class VmaDeviceMemoryBlock
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
- m_pMetadata(VMA_NULL),
- m_MemoryTypeIndex(UINT32_MAX),
- m_Id(0),
- m_hMemory(VK_NULL_HANDLE),
- m_MapCount(0),
- m_pMappedData(VMA_NULL)
+ m_pMetadata(VMA_NULL),
+ m_MemoryTypeIndex(UINT32_MAX),
+ m_Id(0),
+ m_hMemory(VK_NULL_HANDLE),
+ m_MapCount(0),
+ m_pMappedData(VMA_NULL)
{
}
void VmaDeviceMemoryBlock::Init(
- VmaAllocator hAllocator,
- VmaPool hParentPool,
- uint32_t newMemoryTypeIndex,
- VkDeviceMemory newMemory,
- VkDeviceSize newSize,
- uint32_t id,
- uint32_t algorithm)
-{
- VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
-
- m_hParentPool = hParentPool;
- m_MemoryTypeIndex = newMemoryTypeIndex;
- m_Id = id;
- m_hMemory = newMemory;
-
- switch(algorithm)
- {
- case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
- m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
- break;
- case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
- m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
- break;
- default:
- VMA_ASSERT(0);
- // Fall-through.
- case 0:
- m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
- }
- m_pMetadata->Init(newSize);
+ VmaAllocator hAllocator,
+ VmaPool hParentPool,
+ uint32_t newMemoryTypeIndex,
+ VkDeviceMemory newMemory,
+ VkDeviceSize newSize,
+ uint32_t id,
+ uint32_t algorithm)
+{
+ VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
+
+ m_hParentPool = hParentPool;
+ m_MemoryTypeIndex = newMemoryTypeIndex;
+ m_Id = id;
+ m_hMemory = newMemory;
+
+ switch(algorithm)
+ {
+ case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
+ m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
+ break;
+ case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
+ m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
+ break;
+ default:
+ VMA_ASSERT(0);
+ // Fall-through.
+ case 0:
+ m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
+ }
+ m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
- // This is the most important assert in the entire library.
- // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
- VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
+ // This is the most important assert in the entire library.
+ // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
+ VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
- VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
- allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
- m_hMemory = VK_NULL_HANDLE;
+ VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
+ allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
+ m_hMemory = VK_NULL_HANDLE;
- vma_delete(allocator, m_pMetadata);
- m_pMetadata = VMA_NULL;
+ vma_delete(allocator, m_pMetadata);
+ m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
- VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
- (m_pMetadata->GetSize() != 0));
-
- return m_pMetadata->Validate();
+ VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
+ (m_pMetadata->GetSize() != 0));
+
+ return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
- void* pData = nullptr;
- VkResult res = Map(hAllocator, 1, &pData);
- if(res != VK_SUCCESS)
- {
- return res;
- }
+ void* pData = nullptr;
+ VkResult res = Map(hAllocator, 1, &pData);
+ if(res != VK_SUCCESS)
+ {
+ return res;
+ }
- res = m_pMetadata->CheckCorruption(pData);
+ res = m_pMetadata->CheckCorruption(pData);
- Unmap(hAllocator, 1);
+ Unmap(hAllocator, 1);
- return res;
+ return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
- if(count == 0)
- {
- return VK_SUCCESS;
- }
-
- VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
- if(m_MapCount != 0)
- {
- m_MapCount += count;
- VMA_ASSERT(m_pMappedData != VMA_NULL);
- if(ppData != VMA_NULL)
- {
- *ppData = m_pMappedData;
- }
- return VK_SUCCESS;
- }
- else
- {
- VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
- hAllocator->m_hDevice,
- m_hMemory,
- 0, // offset
- VK_WHOLE_SIZE,
- 0, // flags
- &m_pMappedData);
- if(result == VK_SUCCESS)
- {
- if(ppData != VMA_NULL)
- {
- *ppData = m_pMappedData;
- }
- m_MapCount = count;
- }
- return result;
- }
+ if(count == 0)
+ {
+ return VK_SUCCESS;
+ }
+
+ VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+ if(m_MapCount != 0)
+ {
+ m_MapCount += count;
+ VMA_ASSERT(m_pMappedData != VMA_NULL);
+ if(ppData != VMA_NULL)
+ {
+ *ppData = m_pMappedData;
+ }
+ return VK_SUCCESS;
+ }
+ else
+ {
+ VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
+ hAllocator->m_hDevice,
+ m_hMemory,
+ 0, // offset
+ VK_WHOLE_SIZE,
+ 0, // flags
+ &m_pMappedData);
+ if(result == VK_SUCCESS)
+ {
+ if(ppData != VMA_NULL)
+ {
+ *ppData = m_pMappedData;
+ }
+ m_MapCount = count;
+ }
+ return result;
+ }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
- if(count == 0)
- {
- return;
- }
-
- VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
- if(m_MapCount >= count)
- {
- m_MapCount -= count;
- if(m_MapCount == 0)
- {
- m_pMappedData = VMA_NULL;
- (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
- }
- }
- else
- {
- VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
- }
+ if(count == 0)
+ {
+ return;
+ }
+
+ VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+ if(m_MapCount >= count)
+ {
+ m_MapCount -= count;
+ if(m_MapCount == 0)
+ {
+ m_pMappedData = VMA_NULL;
+ (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
+ }
+ }
+ else
+ {
+ VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
+ }
}
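Map()/Unmap() implement reference-counted persistent mapping: only the transition from 0 to nonzero calls vkMapMemory, and only the transition back to 0 calls vkUnmapMemory, so nested users share a single mapping. A minimal single-threaded sketch with stubbed Vulkan entry points; everything here is illustrative, not the VMA API:

#include <cassert>
#include <cstdio>

// Stand-ins for the real Vulkan calls (assumptions for this sketch).
static char g_Memory[1024];
static void* StubMapMemory() { return g_Memory; }
static void StubUnmapMemory() { std::puts("unmapped"); }

class MappedBlock
{
public:
    void* Map()
    {
        if(m_MapCount++ == 0)
            m_pMapped = StubMapMemory(); // first user actually maps
        return m_pMapped;
    }
    void Unmap()
    {
        assert(m_MapCount > 0 && "unbalanced Unmap");
        if(--m_MapCount == 0)
        {
            m_pMapped = nullptr;
            StubUnmapMemory();           // last user actually unmaps
        }
    }
private:
    unsigned m_MapCount = 0;
    void* m_pMapped = nullptr;
};

int main()
{
    MappedBlock block;
    void* a = block.Map();  // maps
    void* b = block.Map();  // nested: same pointer, no second map call
    assert(a == b);
    block.Unmap();          // still mapped
    block.Unmap();          // prints "unmapped"
}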
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
- VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
- VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
+ VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
+ VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
- void* pData;
- VkResult res = Map(hAllocator, 1, &pData);
- if(res != VK_SUCCESS)
- {
- return res;
- }
+ void* pData;
+ VkResult res = Map(hAllocator, 1, &pData);
+ if(res != VK_SUCCESS)
+ {
+ return res;
+ }
- VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
- VmaWriteMagicValue(pData, allocOffset + allocSize);
+ VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
+ VmaWriteMagicValue(pData, allocOffset + allocSize);
- Unmap(hAllocator, 1);
+ Unmap(hAllocator, 1);
- return VK_SUCCESS;
+ return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
- VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
- VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
+ VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
+ VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
- void* pData;
- VkResult res = Map(hAllocator, 1, &pData);
- if(res != VK_SUCCESS)
- {
- return res;
- }
+ void* pData;
+ VkResult res = Map(hAllocator, 1, &pData);
+ if(res != VK_SUCCESS)
+ {
+ return res;
+ }
- if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
- }
- else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
- {
- VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
- }
+ if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
+ {
+ VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
+ }
+ else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
+ {
+ VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
+ }
- Unmap(hAllocator, 1);
+ Unmap(hAllocator, 1);
- return VK_SUCCESS;
+ return VK_SUCCESS;
}
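WriteMagicValueAroundAllocation and ValidateMagicValueAroundAllocation implement the corruption-detection margins: a known 32-bit pattern fills the VMA_DEBUG_MARGIN bytes just before and just after each allocation, and is re-checked when the allocation is freed. A runnable reduction of the idea (the margin size and magic constant below are made up for illustration; the real ones are VMA_DEBUG_MARGIN and VMA_CORRUPTION_DETECTION_MAGIC_VALUE):

#include <cassert>
#include <cstdint>
#include <vector>

static const uint32_t kMagic = 0x7F84E666u; // illustrative magic constant
static const size_t kMargin = 16;           // illustrative margin, multiple of 4

static void WriteMagic(void* pBlockData, size_t offset)
{
    uint32_t* p = reinterpret_cast<uint32_t*>(static_cast<char*>(pBlockData) + offset);
    for(size_t i = 0; i < kMargin / sizeof(uint32_t); ++i)
    {
        *p++ = kMagic; // fill the whole margin with the pattern
    }
}

static bool ValidateMagic(const void* pBlockData, size_t offset)
{
    const uint32_t* p = reinterpret_cast<const uint32_t*>(static_cast<const char*>(pBlockData) + offset);
    for(size_t i = 0; i < kMargin / sizeof(uint32_t); ++i)
    {
        if(*p++ != kMagic)
        {
            return false; // someone wrote over the guard bytes
        }
    }
    return true;
}

int main()
{
    std::vector<char> block(256, 0);
    const size_t allocOffset = kMargin; // allocation preceded by one margin
    const size_t allocSize = 64;
    WriteMagic(block.data(), allocOffset - kMargin);   // guard before the allocation
    WriteMagic(block.data(), allocOffset + allocSize); // guard after the allocation
    assert(ValidateMagic(block.data(), allocOffset - kMargin));
    assert(ValidateMagic(block.data(), allocOffset + allocSize));
    block[allocOffset + allocSize] = 0; // simulate a one-byte overflow...
    assert(!ValidateMagic(block.data(), allocOffset + allocSize)); // ...and it is caught
    return 0;
}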
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
- const VmaAllocator hAllocator,
- const VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkBuffer hBuffer,
- const void* pNext)
-{
- VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
- hAllocation->GetBlock() == this);
- VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
- "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
- const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
- // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
- VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
- return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
+ const VmaAllocator hAllocator,
+ const VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer hBuffer,
+ const void* pNext)
+{
+ VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
+ hAllocation->GetBlock() == this);
+ VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
+ "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
+ const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
+ // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
+ VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+ return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
- const VmaAllocator hAllocator,
- const VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkImage hImage,
- const void* pNext)
-{
- VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
- hAllocation->GetBlock() == this);
- VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
- "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
- const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
- // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
- VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
- return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
+ const VmaAllocator hAllocator,
+ const VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage hImage,
+ const void* pNext)
+{
+ VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
+ hAllocation->GetBlock() == this);
+ VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
+ "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
+ const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
+ // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
+ VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
+ return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void InitStatInfo(VmaStatInfo& outInfo)
{
- memset(&outInfo, 0, sizeof(outInfo));
- outInfo.allocationSizeMin = UINT64_MAX;
- outInfo.unusedRangeSizeMin = UINT64_MAX;
+ memset(&outInfo, 0, sizeof(outInfo));
+ outInfo.allocationSizeMin = UINT64_MAX;
+ outInfo.unusedRangeSizeMin = UINT64_MAX;
}
// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
- inoutInfo.blockCount += srcInfo.blockCount;
- inoutInfo.allocationCount += srcInfo.allocationCount;
- inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
- inoutInfo.usedBytes += srcInfo.usedBytes;
- inoutInfo.unusedBytes += srcInfo.unusedBytes;
- inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
- inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
- inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
- inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
+ inoutInfo.blockCount += srcInfo.blockCount;
+ inoutInfo.allocationCount += srcInfo.allocationCount;
+ inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
+ inoutInfo.usedBytes += srcInfo.usedBytes;
+ inoutInfo.unusedBytes += srcInfo.unusedBytes;
+ inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
+ inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
+ inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
+ inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}
static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
- inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
- VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
- inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
- VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
+ inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
+ VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
+ inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
+ VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
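The three helpers above form an init/accumulate/postprocess pipeline: mins start at UINT64_MAX so that the first VMA_MIN() pulls them down to a real value, per-block stats are summed in one by one, and averages are derived only once the totals are final. A runnable reduction to the allocation-size fields (the struct and helper names here are simplified stand-ins):

#include <algorithm>
#include <cstdint>
#include <cstdio>

struct StatInfo
{
    uint32_t allocationCount = 0;
    uint64_t usedBytes = 0;
    uint64_t allocationSizeMin = UINT64_MAX; // so the first min() pulls it down
    uint64_t allocationSizeMax = 0;
    uint64_t allocationSizeAvg = 0;
};

static void AddStatInfo(StatInfo& inoutInfo, const StatInfo& srcInfo)
{
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.allocationSizeMin = std::min(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = std::max(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
}

static void PostprocessCalcStatInfo(StatInfo& inoutInfo)
{
    // Rounded division, like VmaRoundDiv: (a + b/2) / b.
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        (inoutInfo.usedBytes + inoutInfo.allocationCount / 2) / inoutInfo.allocationCount : 0;
}

int main()
{
    StatInfo total; // default-initialized, like InitStatInfo()

    StatInfo blockA;
    blockA.allocationCount = 2;
    blockA.usedBytes = 300;
    blockA.allocationSizeMin = 100;
    blockA.allocationSizeMax = 200;

    StatInfo blockB;
    blockB.allocationCount = 1;
    blockB.usedBytes = 50;
    blockB.allocationSizeMin = 50;
    blockB.allocationSizeMax = 50;

    AddStatInfo(total, blockA);     // totals and min/max accumulate per block
    AddStatInfo(total, blockB);
    PostprocessCalcStatInfo(total); // averages derived once, at the end

    std::printf("count=%u avg=%llu min=%llu max=%llu\n",
        total.allocationCount,
        (unsigned long long)total.allocationSizeAvg,
        (unsigned long long)total.allocationSizeMin,
        (unsigned long long)total.allocationSizeMax);
    return 0;
}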
VmaPool_T::VmaPool_T(
- VmaAllocator hAllocator,
- const VmaPoolCreateInfo& createInfo,
- VkDeviceSize preferredBlockSize) :
- m_BlockVector(
- hAllocator,
- this, // hParentPool
- createInfo.memoryTypeIndex,
- createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
- createInfo.minBlockCount,
- createInfo.maxBlockCount,
- (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
- createInfo.frameInUseCount,
- createInfo.blockSize != 0, // explicitBlockSize
- createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
- m_Id(0),
- m_Name(VMA_NULL)
+ VmaAllocator hAllocator,
+ const VmaPoolCreateInfo& createInfo,
+ VkDeviceSize preferredBlockSize) :
+ m_BlockVector(
+ hAllocator,
+ this, // hParentPool
+ createInfo.memoryTypeIndex,
+ createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
+ createInfo.minBlockCount,
+ createInfo.maxBlockCount,
+ (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
+ createInfo.frameInUseCount,
+ createInfo.blockSize != 0, // explicitBlockSize
+ createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
+ m_Id(0),
+ m_Name(VMA_NULL)
{
}
@@ -11901,17 +11739,17 @@ VmaPool_T::~VmaPool_T()
void VmaPool_T::SetName(const char* pName)
{
- const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
- VmaFreeString(allocs, m_Name);
-
- if(pName != VMA_NULL)
- {
- m_Name = VmaCreateStringCopy(allocs, pName);
- }
- else
- {
- m_Name = VMA_NULL;
- }
+ const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
+ VmaFreeString(allocs, m_Name);
+
+ if(pName != VMA_NULL)
+ {
+ m_Name = VmaCreateStringCopy(allocs, pName);
+ }
+ else
+ {
+ m_Name = VMA_NULL;
+ }
}
#if VMA_STATS_STRING_ENABLED
@@ -11919,1637 +11757,1563 @@ void VmaPool_T::SetName(const char* pName)
#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
- VmaAllocator hAllocator,
- VmaPool hParentPool,
- uint32_t memoryTypeIndex,
- VkDeviceSize preferredBlockSize,
- size_t minBlockCount,
- size_t maxBlockCount,
- VkDeviceSize bufferImageGranularity,
- uint32_t frameInUseCount,
- bool explicitBlockSize,
- uint32_t algorithm) :
- m_hAllocator(hAllocator),
- m_hParentPool(hParentPool),
- m_MemoryTypeIndex(memoryTypeIndex),
- m_PreferredBlockSize(preferredBlockSize),
- m_MinBlockCount(minBlockCount),
- m_MaxBlockCount(maxBlockCount),
- m_BufferImageGranularity(bufferImageGranularity),
- m_FrameInUseCount(frameInUseCount),
- m_ExplicitBlockSize(explicitBlockSize),
- m_Algorithm(algorithm),
- m_HasEmptyBlock(false),
- m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
- m_NextBlockId(0)
+ VmaAllocator hAllocator,
+ VmaPool hParentPool,
+ uint32_t memoryTypeIndex,
+ VkDeviceSize preferredBlockSize,
+ size_t minBlockCount,
+ size_t maxBlockCount,
+ VkDeviceSize bufferImageGranularity,
+ uint32_t frameInUseCount,
+ bool explicitBlockSize,
+ uint32_t algorithm) :
+ m_hAllocator(hAllocator),
+ m_hParentPool(hParentPool),
+ m_MemoryTypeIndex(memoryTypeIndex),
+ m_PreferredBlockSize(preferredBlockSize),
+ m_MinBlockCount(minBlockCount),
+ m_MaxBlockCount(maxBlockCount),
+ m_BufferImageGranularity(bufferImageGranularity),
+ m_FrameInUseCount(frameInUseCount),
+ m_ExplicitBlockSize(explicitBlockSize),
+ m_Algorithm(algorithm),
+ m_HasEmptyBlock(false),
+ m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
+ m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
- for(size_t i = m_Blocks.size(); i--; )
- {
- m_Blocks[i]->Destroy(m_hAllocator);
- vma_delete(m_hAllocator, m_Blocks[i]);
- }
+ for(size_t i = m_Blocks.size(); i--; )
+ {
+ m_Blocks[i]->Destroy(m_hAllocator);
+ vma_delete(m_hAllocator, m_Blocks[i]);
+ }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
- for(size_t i = 0; i < m_MinBlockCount; ++i)
- {
- VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- }
- return VK_SUCCESS;
+ for(size_t i = 0; i < m_MinBlockCount; ++i)
+ {
+ VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
+ if(res != VK_SUCCESS)
+ {
+ return res;
+ }
+ }
+ return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+ VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
- const size_t blockCount = m_Blocks.size();
+ const size_t blockCount = m_Blocks.size();
- pStats->size = 0;
- pStats->unusedSize = 0;
- pStats->allocationCount = 0;
- pStats->unusedRangeCount = 0;
- pStats->unusedRangeSizeMax = 0;
- pStats->blockCount = blockCount;
+ pStats->size = 0;
+ pStats->unusedSize = 0;
+ pStats->allocationCount = 0;
+ pStats->unusedRangeCount = 0;
+ pStats->unusedRangeSizeMax = 0;
+ pStats->blockCount = blockCount;
- for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pBlock);
- VMA_HEAVY_ASSERT(pBlock->Validate());
- pBlock->m_pMetadata->AddPoolStats(*pStats);
- }
+ for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+ {
+ const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+ VMA_ASSERT(pBlock);
+ VMA_HEAVY_ASSERT(pBlock->Validate());
+ pBlock->m_pMetadata->AddPoolStats(*pStats);
+ }
}
bool VmaBlockVector::IsEmpty()
{
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
- return m_Blocks.empty();
+ VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+ return m_Blocks.empty();
}
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
- const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
- (VMA_DEBUG_MARGIN > 0) &&
- (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) &&
- (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
+ const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
+ (VMA_DEBUG_MARGIN > 0) &&
+ (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) &&
+ (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- size_t allocIndex;
- VkResult res = VK_SUCCESS;
-
- if(IsCorruptionDetectionEnabled())
- {
- size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
- alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
- }
-
- {
- VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
- for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- res = AllocatePage(
- currentFrameIndex,
- size,
- alignment,
- createInfo,
- suballocType,
- pAllocations + allocIndex);
- if(res != VK_SUCCESS)
- {
- break;
- }
- }
- }
-
- if(res != VK_SUCCESS)
- {
- // Free all already created allocations.
- while(allocIndex--)
- {
- Free(pAllocations[allocIndex]);
- }
- memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
- }
-
- return res;
+ uint32_t currentFrameIndex,
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaSuballocationType suballocType,
+ size_t allocationCount,
+ VmaAllocation* pAllocations)
+{
+ size_t allocIndex;
+ VkResult res = VK_SUCCESS;
+
+ if(IsCorruptionDetectionEnabled())
+ {
+ size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
+ alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
+ }
+
+ {
+ VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+ for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+ {
+ res = AllocatePage(
+ currentFrameIndex,
+ size,
+ alignment,
+ createInfo,
+ suballocType,
+ pAllocations + allocIndex);
+ if(res != VK_SUCCESS)
+ {
+ break;
+ }
+ }
+ }
+
+ if(res != VK_SUCCESS)
+ {
+ // Free all already created allocations.
+ while(allocIndex--)
+ {
+ Free(pAllocations[allocIndex]);
+ }
+ memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+ }
+
+ return res;
}
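Allocate() above is all-or-nothing over allocationCount pages: pages are created one by one under the lock and, on the first failure, every page created so far is freed and the output array is zeroed, so the caller never observes a partial batch. The same pattern reduced to plain malloc/free (the function and variable names are illustrative):

#include <cstdio>
#include <cstdlib>
#include <cstring>

// Fills outPtrs[0..count) or leaves it zeroed. Returns true on success.
static bool AllocateAll(size_t count, size_t size, void** outPtrs)
{
    size_t i = 0;
    bool ok = true;
    for(; i < count; ++i)
    {
        outPtrs[i] = std::malloc(size);
        if(outPtrs[i] == nullptr)
        {
            ok = false;
            break;
        }
    }
    if(!ok)
    {
        // Free all already created allocations; i is the failing index,
        // so while(i--) walks i-1 down to 0, mirroring the code above.
        while(i--)
        {
            std::free(outPtrs[i]);
        }
        std::memset(outPtrs, 0, sizeof(void*) * count);
    }
    return ok;
}

int main()
{
    void* ptrs[4] = {};
    if(AllocateAll(4, 64, ptrs))
    {
        for(void* p : ptrs) std::free(p);
        std::printf("all 4 pages allocated and freed\n");
    }
    return 0;
}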
VkResult VmaBlockVector::AllocatePage(
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- VmaAllocation* pAllocation)
-{
- const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
- bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
- const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
- const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
-
- const bool withinBudget = (createInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0;
- VkDeviceSize freeMemory;
- {
- const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
- VmaBudget heapBudget = {};
- m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
- freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
- }
-
- const bool canFallbackToDedicated = !IsCustomPool();
- const bool canCreateNewBlock =
- ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
- (m_Blocks.size() < m_MaxBlockCount) &&
- (freeMemory >= size || !canFallbackToDedicated);
- uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;
-
- // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer.
- // Which in turn is available only when maxBlockCount = 1.
- if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
- {
- canMakeOtherLost = false;
- }
-
- // Upper address can only be used with linear allocator and within single memory block.
- if(isUpperAddress &&
- (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
- {
- return VK_ERROR_FEATURE_NOT_PRESENT;
- }
-
- // Validate strategy.
- switch(strategy)
- {
- case 0:
- strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
- break;
- case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
- case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
- case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
- break;
- default:
- return VK_ERROR_FEATURE_NOT_PRESENT;
- }
-
- // Early reject: requested allocation size is larger that maximum block size for this block vector.
- if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
- {
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
-
- /*
- Under certain condition, this whole section can be skipped for optimization, so
- we move on directly to trying to allocate with canMakeOtherLost. That's the case
- e.g. for custom pools with linear algorithm.
- */
- if(!canMakeOtherLost || canCreateNewBlock)
- {
- // 1. Search existing allocations. Try to allocate without making other allocations lost.
- VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
- allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
-
- if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
- {
- // Use only last block.
- if(!m_Blocks.empty())
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
- VMA_ASSERT(pCurrBlock);
- VkResult res = AllocateFromBlock(
- pCurrBlock,
- currentFrameIndex,
- size,
- alignment,
- allocFlagsCopy,
- createInfo.pUserData,
- suballocType,
- strategy,
- pAllocation);
- if(res == VK_SUCCESS)
- {
- VMA_DEBUG_LOG(" Returned from last block #%u", pCurrBlock->GetId());
- return VK_SUCCESS;
- }
- }
- }
- else
- {
- if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
- {
- // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
- for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pCurrBlock);
- VkResult res = AllocateFromBlock(
- pCurrBlock,
- currentFrameIndex,
- size,
- alignment,
- allocFlagsCopy,
- createInfo.pUserData,
- suballocType,
- strategy,
- pAllocation);
- if(res == VK_SUCCESS)
- {
- VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId());
- return VK_SUCCESS;
- }
- }
- }
- else // WORST_FIT, FIRST_FIT
- {
- // Backward order in m_Blocks - prefer blocks with largest amount of free space.
- for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pCurrBlock);
- VkResult res = AllocateFromBlock(
- pCurrBlock,
- currentFrameIndex,
- size,
- alignment,
- allocFlagsCopy,
- createInfo.pUserData,
- suballocType,
- strategy,
- pAllocation);
- if(res == VK_SUCCESS)
- {
- VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId());
- return VK_SUCCESS;
- }
- }
- }
- }
-
- // 2. Try to create new block.
- if(canCreateNewBlock)
- {
- // Calculate optimal size for new block.
- VkDeviceSize newBlockSize = m_PreferredBlockSize;
- uint32_t newBlockSizeShift = 0;
- const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
-
- if(!m_ExplicitBlockSize)
- {
- // Allocate 1/8, 1/4, 1/2 as first blocks.
- const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
- for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
- {
- const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
- if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
- {
- newBlockSize = smallerNewBlockSize;
- ++newBlockSizeShift;
- }
- else
- {
- break;
- }
- }
- }
-
- size_t newBlockIndex = 0;
- VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
- CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
- // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
- if(!m_ExplicitBlockSize)
- {
- while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
- {
- const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
- if(smallerNewBlockSize >= size)
- {
- newBlockSize = smallerNewBlockSize;
- ++newBlockSizeShift;
- res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
- CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- else
- {
- break;
- }
- }
- }
-
- if(res == VK_SUCCESS)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
- VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
-
- res = AllocateFromBlock(
- pBlock,
- currentFrameIndex,
- size,
- alignment,
- allocFlagsCopy,
- createInfo.pUserData,
- suballocType,
- strategy,
- pAllocation);
- if(res == VK_SUCCESS)
- {
- VMA_DEBUG_LOG(" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
- return VK_SUCCESS;
- }
- else
- {
- // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- }
- }
- }
-
- // 3. Try to allocate from existing blocks with making other allocations lost.
- if(canMakeOtherLost)
- {
- uint32_t tryIndex = 0;
- for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
- {
- VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
- VmaAllocationRequest bestRequest = {};
- VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
-
- // 1. Search existing allocations.
- if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
- {
- // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
- for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pCurrBlock);
- VmaAllocationRequest currRequest = {};
- if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
- currentFrameIndex,
- m_FrameInUseCount,
- m_BufferImageGranularity,
- size,
- alignment,
- (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
- suballocType,
- canMakeOtherLost,
- strategy,
- &currRequest))
- {
- const VkDeviceSize currRequestCost = currRequest.CalcCost();
- if(pBestRequestBlock == VMA_NULL ||
- currRequestCost < bestRequestCost)
- {
- pBestRequestBlock = pCurrBlock;
- bestRequest = currRequest;
- bestRequestCost = currRequestCost;
-
- if(bestRequestCost == 0)
- {
- break;
- }
- }
- }
- }
- }
- else // WORST_FIT, FIRST_FIT
- {
- // Backward order in m_Blocks - prefer blocks with largest amount of free space.
- for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
- {
- VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pCurrBlock);
- VmaAllocationRequest currRequest = {};
- if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
- currentFrameIndex,
- m_FrameInUseCount,
- m_BufferImageGranularity,
- size,
- alignment,
- (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
- suballocType,
- canMakeOtherLost,
- strategy,
- &currRequest))
- {
- const VkDeviceSize currRequestCost = currRequest.CalcCost();
- if(pBestRequestBlock == VMA_NULL ||
- currRequestCost < bestRequestCost ||
- strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
- {
- pBestRequestBlock = pCurrBlock;
- bestRequest = currRequest;
- bestRequestCost = currRequestCost;
-
- if(bestRequestCost == 0 ||
- strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
- {
- break;
- }
- }
- }
- }
- }
-
- if(pBestRequestBlock != VMA_NULL)
- {
- if(mapped)
- {
- VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- }
-
- if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
- currentFrameIndex,
- m_FrameInUseCount,
- &bestRequest))
- {
- // Allocate from this pBlock.
- *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
- pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
- UpdateHasEmptyBlock();
- (*pAllocation)->InitBlockAllocation(
- pBestRequestBlock,
- bestRequest.offset,
- alignment,
- size,
- m_MemoryTypeIndex,
- suballocType,
- mapped,
- (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
- VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
- VMA_DEBUG_LOG(" Returned from existing block");
- (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
- m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
- {
- m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
- }
- if(IsCorruptionDetectionEnabled())
- {
- VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
- VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
- }
- return VK_SUCCESS;
- }
- // else: Some allocations must have been touched while we are here. Next try.
- }
- else
- {
- // Could not find place in any of the blocks - break outer loop.
- break;
- }
- }
- /* Maximum number of tries exceeded - a very unlike event when many other
- threads are simultaneously touching allocations making it impossible to make
- lost at the same time as we try to allocate. */
- if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
- {
- return VK_ERROR_TOO_MANY_OBJECTS;
- }
- }
-
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ uint32_t currentFrameIndex,
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaSuballocationType suballocType,
+ VmaAllocation* pAllocation)
+{
+ const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
+ bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
+ const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
+ const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
+
+ const bool withinBudget = (createInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0;
+ VkDeviceSize freeMemory;
+ {
+ const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
+ VmaBudget heapBudget = {};
+ m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
+ freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
+ }
+
+ const bool canFallbackToDedicated = !IsCustomPool();
+ const bool canCreateNewBlock =
+ ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
+ (m_Blocks.size() < m_MaxBlockCount) &&
+ (freeMemory >= size || !canFallbackToDedicated);
+ uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;
+
+    // If linearAlgorithm is used, canMakeOtherLost is available only when used as a ring buffer,
+    // which in turn is available only when maxBlockCount = 1.
+ if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
+ {
+ canMakeOtherLost = false;
+ }
+
+ // Upper address can only be used with linear allocator and within single memory block.
+ if(isUpperAddress &&
+ (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
+ {
+ return VK_ERROR_FEATURE_NOT_PRESENT;
+ }
+
+ // Validate strategy.
+ switch(strategy)
+ {
+ case 0:
+ strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
+ break;
+ case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
+ case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
+ case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
+ break;
+ default:
+ return VK_ERROR_FEATURE_NOT_PRESENT;
+ }
+
+    // Early reject: requested allocation size is larger than the maximum block size for this block vector.
+ if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
+ {
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+
+    /*
+    Under certain conditions, this whole section can be skipped as an optimization, so
+    we move on directly to trying to allocate with canMakeOtherLost. That's the case
+    e.g. for custom pools with the linear algorithm.
+    */
+ if(!canMakeOtherLost || canCreateNewBlock)
+ {
+ // 1. Search existing allocations. Try to allocate without making other allocations lost.
+ VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
+ allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
+
+ if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
+ {
+ // Use only last block.
+ if(!m_Blocks.empty())
+ {
+ VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
+ VMA_ASSERT(pCurrBlock);
+ VkResult res = AllocateFromBlock(
+ pCurrBlock,
+ currentFrameIndex,
+ size,
+ alignment,
+ allocFlagsCopy,
+ createInfo.pUserData,
+ suballocType,
+ strategy,
+ pAllocation);
+ if(res == VK_SUCCESS)
+ {
+ VMA_DEBUG_LOG(" Returned from last block #%u", pCurrBlock->GetId());
+ return VK_SUCCESS;
+ }
+ }
+ }
+ else
+ {
+ if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
+ {
+ // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
+ for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
+ {
+ VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+ VMA_ASSERT(pCurrBlock);
+ VkResult res = AllocateFromBlock(
+ pCurrBlock,
+ currentFrameIndex,
+ size,
+ alignment,
+ allocFlagsCopy,
+ createInfo.pUserData,
+ suballocType,
+ strategy,
+ pAllocation);
+ if(res == VK_SUCCESS)
+ {
+ VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId());
+ return VK_SUCCESS;
+ }
+ }
+ }
+ else // WORST_FIT, FIRST_FIT
+ {
+ // Backward order in m_Blocks - prefer blocks with largest amount of free space.
+ for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+ {
+ VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+ VMA_ASSERT(pCurrBlock);
+ VkResult res = AllocateFromBlock(
+ pCurrBlock,
+ currentFrameIndex,
+ size,
+ alignment,
+ allocFlagsCopy,
+ createInfo.pUserData,
+ suballocType,
+ strategy,
+ pAllocation);
+ if(res == VK_SUCCESS)
+ {
+ VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId());
+ return VK_SUCCESS;
+ }
+ }
+ }
+ }
+
+ // 2. Try to create new block.
+ if(canCreateNewBlock)
+ {
+ // Calculate optimal size for new block.
+ VkDeviceSize newBlockSize = m_PreferredBlockSize;
+ uint32_t newBlockSizeShift = 0;
+ const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
+
+ if(!m_ExplicitBlockSize)
+ {
+ // Allocate 1/8, 1/4, 1/2 as first blocks.
+ const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
+ for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
+ {
+ const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
+ if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
+ {
+ newBlockSize = smallerNewBlockSize;
+ ++newBlockSizeShift;
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+
+ size_t newBlockIndex = 0;
+ VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
+ CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
+ if(!m_ExplicitBlockSize)
+ {
+ while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
+ {
+ const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
+ if(smallerNewBlockSize >= size)
+ {
+ newBlockSize = smallerNewBlockSize;
+ ++newBlockSizeShift;
+ res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
+ CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+
+ if(res == VK_SUCCESS)
+ {
+ VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
+ VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
+
+ res = AllocateFromBlock(
+ pBlock,
+ currentFrameIndex,
+ size,
+ alignment,
+ allocFlagsCopy,
+ createInfo.pUserData,
+ suballocType,
+ strategy,
+ pAllocation);
+ if(res == VK_SUCCESS)
+ {
+ VMA_DEBUG_LOG(" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
+ return VK_SUCCESS;
+ }
+ else
+ {
+ // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ }
+ }
+ }
+
+    // 3. Try to allocate from existing blocks, making other allocations lost.
+ if(canMakeOtherLost)
+ {
+ uint32_t tryIndex = 0;
+ for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
+ {
+ VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
+ VmaAllocationRequest bestRequest = {};
+ VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
+
+ // 1. Search existing allocations.
+ if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
+ {
+ // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
+ for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
+ {
+ VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+ VMA_ASSERT(pCurrBlock);
+ VmaAllocationRequest currRequest = {};
+ if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
+ currentFrameIndex,
+ m_FrameInUseCount,
+ m_BufferImageGranularity,
+ size,
+ alignment,
+ (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
+ suballocType,
+ canMakeOtherLost,
+ strategy,
+ &currRequest))
+ {
+ const VkDeviceSize currRequestCost = currRequest.CalcCost();
+ if(pBestRequestBlock == VMA_NULL ||
+ currRequestCost < bestRequestCost)
+ {
+ pBestRequestBlock = pCurrBlock;
+ bestRequest = currRequest;
+ bestRequestCost = currRequestCost;
+
+ if(bestRequestCost == 0)
+ {
+ break;
+ }
+ }
+ }
+ }
+ }
+ else // WORST_FIT, FIRST_FIT
+ {
+ // Backward order in m_Blocks - prefer blocks with largest amount of free space.
+ for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+ {
+ VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
+ VMA_ASSERT(pCurrBlock);
+ VmaAllocationRequest currRequest = {};
+ if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
+ currentFrameIndex,
+ m_FrameInUseCount,
+ m_BufferImageGranularity,
+ size,
+ alignment,
+ (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
+ suballocType,
+ canMakeOtherLost,
+ strategy,
+ &currRequest))
+ {
+ const VkDeviceSize currRequestCost = currRequest.CalcCost();
+ if(pBestRequestBlock == VMA_NULL ||
+ currRequestCost < bestRequestCost ||
+ strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
+ {
+ pBestRequestBlock = pCurrBlock;
+ bestRequest = currRequest;
+ bestRequestCost = currRequestCost;
+
+ if(bestRequestCost == 0 ||
+ strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
+ {
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ if(pBestRequestBlock != VMA_NULL)
+ {
+ if(mapped)
+ {
+ VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
+ if(res != VK_SUCCESS)
+ {
+ return res;
+ }
+ }
+
+ if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
+ currentFrameIndex,
+ m_FrameInUseCount,
+ &bestRequest))
+ {
+                // Allocate from pBestRequestBlock.
+ *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
+ (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
+ pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
+ UpdateHasEmptyBlock();
+ (*pAllocation)->InitBlockAllocation(
+ pBestRequestBlock,
+ bestRequest.offset,
+ alignment,
+ size,
+ m_MemoryTypeIndex,
+ suballocType,
+ mapped,
+ (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
+ VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
+ VMA_DEBUG_LOG(" Returned from existing block");
+ (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
+ m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
+ if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+ {
+ m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+ }
+ if(IsCorruptionDetectionEnabled())
+ {
+ VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
+ VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
+ }
+ return VK_SUCCESS;
+ }
+ // else: Some allocations must have been touched while we are here. Next try.
+ }
+ else
+ {
+            // Could not find a place in any of the blocks - break the outer loop.
+ break;
+ }
+ }
+        /* Maximum number of tries exceeded - a very unlikely event that can happen when many
+        other threads are simultaneously touching allocations, making it impossible to make
+        them lost at the same time as we try to allocate. */
+ if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
+ {
+ return VK_ERROR_TOO_MANY_OBJECTS;
+ }
+ }
+
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
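The new-block sizing above starts small for the first blocks (1/8, 1/4, 1/2 of the preferred block size, as long as that still fits the request with headroom) and also retries a failed device allocation at half the size up to NEW_BLOCK_SIZE_SHIFT_MAX times. A runnable numeric sketch of the initial-size rule, assuming a 256 MiB preferred block size (the concrete numbers are only an example):

#include <cstdint>
#include <cstdio>

static const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

// Returns the initial size to try for a new block.
static uint64_t InitialBlockSize(uint64_t preferredBlockSize,
                                 uint64_t maxExistingBlockSize,
                                 uint64_t requestSize)
{
    uint64_t newBlockSize = preferredBlockSize;
    for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
    {
        const uint64_t smaller = newBlockSize / 2;
        // Shrink only while still bigger than every existing block and
        // leaving at least 2x headroom for the request.
        if(smaller > maxExistingBlockSize && smaller >= requestSize * 2)
        {
            newBlockSize = smaller;
        }
        else
        {
            break;
        }
    }
    return newBlockSize;
}

int main()
{
    const uint64_t MiB = 1ull << 20;
    // First block in an empty vector, 10 MiB request: shrinks 3 times to 32 MiB.
    std::printf("%llu MiB\n", (unsigned long long)(InitialBlockSize(256 * MiB, 0, 10 * MiB) / MiB));
    // 100 MiB request: halving would leave < 2x headroom, so keep 256 MiB.
    std::printf("%llu MiB\n", (unsigned long long)(InitialBlockSize(256 * MiB, 0, 100 * MiB) / MiB));
    return 0;
}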
void VmaBlockVector::Free(
- const VmaAllocation hAllocation)
-{
- VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
-
- bool budgetExceeded = false;
- {
- const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
- VmaBudget heapBudget = {};
- m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
- budgetExceeded = heapBudget.usage >= heapBudget.budget;
- }
-
- // Scope for lock.
- {
- VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-
- VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
-
- if(IsCorruptionDetectionEnabled())
- {
- VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
- VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
- }
-
- if(hAllocation->IsPersistentMap())
- {
- pBlock->Unmap(m_hAllocator, 1);
- }
-
- pBlock->m_pMetadata->Free(hAllocation);
- VMA_HEAVY_ASSERT(pBlock->Validate());
-
- VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
-
- const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
- // pBlock became empty after this deallocation.
- if(pBlock->m_pMetadata->IsEmpty())
- {
- // Already has empty block. We don't want to have two, so delete this one.
- if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
- {
- pBlockToDelete = pBlock;
- Remove(pBlock);
- }
- // else: We now have an empty block - leave it.
- }
- // pBlock didn't become empty, but we have another empty block - find and free that one.
- // (This is optional, heuristics.)
- else if(m_HasEmptyBlock && canDeleteBlock)
- {
- VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
- if(pLastBlock->m_pMetadata->IsEmpty())
- {
- pBlockToDelete = pLastBlock;
- m_Blocks.pop_back();
- }
- }
-
- UpdateHasEmptyBlock();
- IncrementallySortBlocks();
- }
-
- // Destruction of a free block. Deferred until this point, outside of mutex
- // lock, for performance reason.
- if(pBlockToDelete != VMA_NULL)
- {
- VMA_DEBUG_LOG(" Deleted empty block");
- pBlockToDelete->Destroy(m_hAllocator);
- vma_delete(m_hAllocator, pBlockToDelete);
- }
+ const VmaAllocation hAllocation)
+{
+ VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
+
+ bool budgetExceeded = false;
+ {
+ const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
+ VmaBudget heapBudget = {};
+ m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
+ budgetExceeded = heapBudget.usage >= heapBudget.budget;
+ }
+
+ // Scope for lock.
+ {
+ VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+ VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+
+ if(IsCorruptionDetectionEnabled())
+ {
+ VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
+ VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
+ }
+
+ if(hAllocation->IsPersistentMap())
+ {
+ pBlock->Unmap(m_hAllocator, 1);
+ }
+
+ pBlock->m_pMetadata->Free(hAllocation);
+ VMA_HEAVY_ASSERT(pBlock->Validate());
+
+ VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
+
+ const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
+ // pBlock became empty after this deallocation.
+ if(pBlock->m_pMetadata->IsEmpty())
+ {
+            // We already have an empty block. We don't want two, so delete this one.
+ if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
+ {
+ pBlockToDelete = pBlock;
+ Remove(pBlock);
+ }
+ // else: We now have an empty block - leave it.
+ }
+ // pBlock didn't become empty, but we have another empty block - find and free that one.
+        // (This is an optional heuristic.)
+ else if(m_HasEmptyBlock && canDeleteBlock)
+ {
+ VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
+ if(pLastBlock->m_pMetadata->IsEmpty())
+ {
+ pBlockToDelete = pLastBlock;
+ m_Blocks.pop_back();
+ }
+ }
+
+ UpdateHasEmptyBlock();
+ IncrementallySortBlocks();
+ }
+
+ // Destruction of a free block. Deferred until this point, outside of mutex
+    // lock, for performance reasons.
+ if(pBlockToDelete != VMA_NULL)
+ {
+ VMA_DEBUG_LOG(" Deleted empty block");
+ pBlockToDelete->Destroy(m_hAllocator);
+ vma_delete(m_hAllocator, pBlockToDelete);
+ }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
- VkDeviceSize result = 0;
- for(size_t i = m_Blocks.size(); i--; )
- {
- result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
- if(result >= m_PreferredBlockSize)
- {
- break;
- }
- }
- return result;
+ VkDeviceSize result = 0;
+ for(size_t i = m_Blocks.size(); i--; )
+ {
+ result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
+ if(result >= m_PreferredBlockSize)
+ {
+ break;
+ }
+ }
+ return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
- for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
- {
- if(m_Blocks[blockIndex] == pBlock)
- {
- VmaVectorRemove(m_Blocks, blockIndex);
- return;
- }
- }
- VMA_ASSERT(0);
+ for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+ {
+ if(m_Blocks[blockIndex] == pBlock)
+ {
+ VmaVectorRemove(m_Blocks, blockIndex);
+ return;
+ }
+ }
+ VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
- if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
- {
- // Bubble sort only until first swap.
- for(size_t i = 1; i < m_Blocks.size(); ++i)
- {
- if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
- {
- VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
- return;
- }
- }
- }
+ if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
+ {
+ // Bubble sort only until first swap.
+ for(size_t i = 1; i < m_Blocks.size(); ++i)
+ {
+ if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
+ {
+ VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
+ return;
+ }
+ }
+ }
}
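IncrementallySortBlocks() above performs at most one adjacent swap per call, so the vector drifts toward ascending free-space order over many allocate/free calls without ever paying for a full sort under the lock. A runnable reduction over plain integers (names are illustrative):

#include <cstdio>
#include <utility>
#include <vector>

// One bubble-sort pass that stops at the first swap.
static void IncrementallySort(std::vector<int>& v)
{
    for(size_t i = 1; i < v.size(); ++i)
    {
        if(v[i - 1] > v[i])
        {
            std::swap(v[i - 1], v[i]);
            return; // at most one swap per call
        }
    }
}

int main()
{
    std::vector<int> freeSpace = { 30, 10, 20 };
    IncrementallySort(freeSpace); // { 10, 30, 20 }
    IncrementallySort(freeSpace); // { 10, 20, 30 }
    for(int x : freeSpace)
    {
        std::printf("%d ", x);
    }
    std::printf("\n");
    return 0;
}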
VkResult VmaBlockVector::AllocateFromBlock(
- VmaDeviceMemoryBlock* pBlock,
- uint32_t currentFrameIndex,
- VkDeviceSize size,
- VkDeviceSize alignment,
- VmaAllocationCreateFlags allocFlags,
- void* pUserData,
- VmaSuballocationType suballocType,
- uint32_t strategy,
- VmaAllocation* pAllocation)
-{
- VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
- const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
- const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
- const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
-
- VmaAllocationRequest currRequest = {};
- if(pBlock->m_pMetadata->CreateAllocationRequest(
- currentFrameIndex,
- m_FrameInUseCount,
- m_BufferImageGranularity,
- size,
- alignment,
- isUpperAddress,
- suballocType,
- false, // canMakeOtherLost
- strategy,
- &currRequest))
- {
- // Allocate from pCurrBlock.
- VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
-
- if(mapped)
- {
- VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- }
-
- *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
- pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
- UpdateHasEmptyBlock();
- (*pAllocation)->InitBlockAllocation(
- pBlock,
- currRequest.offset,
- alignment,
- size,
- m_MemoryTypeIndex,
- suballocType,
- mapped,
- (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
- VMA_HEAVY_ASSERT(pBlock->Validate());
- (*pAllocation)->SetUserData(m_hAllocator, pUserData);
- m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
- {
- m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
- }
- if(IsCorruptionDetectionEnabled())
- {
- VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
- VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
- }
- return VK_SUCCESS;
- }
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ VmaDeviceMemoryBlock* pBlock,
+ uint32_t currentFrameIndex,
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ VmaAllocationCreateFlags allocFlags,
+ void* pUserData,
+ VmaSuballocationType suballocType,
+ uint32_t strategy,
+ VmaAllocation* pAllocation)
+{
+ VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
+ const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
+ const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
+ const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;
+
+ VmaAllocationRequest currRequest = {};
+ if(pBlock->m_pMetadata->CreateAllocationRequest(
+ currentFrameIndex,
+ m_FrameInUseCount,
+ m_BufferImageGranularity,
+ size,
+ alignment,
+ isUpperAddress,
+ suballocType,
+ false, // canMakeOtherLost
+ strategy,
+ &currRequest))
+ {
+        // Allocate from pBlock.
+ VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
+
+ if(mapped)
+ {
+ VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
+ if(res != VK_SUCCESS)
+ {
+ return res;
+ }
+ }
+
+ *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
+ (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
+ pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
+ UpdateHasEmptyBlock();
+ (*pAllocation)->InitBlockAllocation(
+ pBlock,
+ currRequest.offset,
+ alignment,
+ size,
+ m_MemoryTypeIndex,
+ suballocType,
+ mapped,
+ (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
+ VMA_HEAVY_ASSERT(pBlock->Validate());
+ (*pAllocation)->SetUserData(m_hAllocator, pUserData);
+ m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
+ if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+ {
+ m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+ }
+ if(IsCorruptionDetectionEnabled())
+ {
+ VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
+ VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
+ }
+ return VK_SUCCESS;
+ }
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
- VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
- allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
- allocInfo.allocationSize = blockSize;
- VkDeviceMemory mem = VK_NULL_HANDLE;
- VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
- if(res < 0)
- {
- return res;
- }
-
- // New VkDeviceMemory successfully created.
-
- // Create new Allocation for it.
- VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
- pBlock->Init(
- m_hAllocator,
- m_hParentPool,
- m_MemoryTypeIndex,
- mem,
- allocInfo.allocationSize,
- m_NextBlockId++,
- m_Algorithm);
-
- m_Blocks.push_back(pBlock);
- if(pNewBlockIndex != VMA_NULL)
- {
- *pNewBlockIndex = m_Blocks.size() - 1;
- }
-
- return VK_SUCCESS;
+ VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
+ allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
+ allocInfo.allocationSize = blockSize;
+ VkDeviceMemory mem = VK_NULL_HANDLE;
+ VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
+ if(res < 0)
+ {
+ return res;
+ }
+
+ // New VkDeviceMemory successfully created.
+
+    // Create a new VmaDeviceMemoryBlock object for it.
+ VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
+ pBlock->Init(
+ m_hAllocator,
+ m_hParentPool,
+ m_MemoryTypeIndex,
+ mem,
+ allocInfo.allocationSize,
+ m_NextBlockId++,
+ m_Algorithm);
+
+ m_Blocks.push_back(pBlock);
+ if(pNewBlockIndex != VMA_NULL)
+ {
+ *pNewBlockIndex = m_Blocks.size() - 1;
+ }
+
+ return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
- class VmaBlockVectorDefragmentationContext* pDefragCtx,
- const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
-{
- const size_t blockCount = m_Blocks.size();
- const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
-
- enum BLOCK_FLAG
- {
- BLOCK_FLAG_USED = 0x00000001,
- BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
- };
-
- struct BlockInfo
- {
- uint32_t flags;
- void* pMappedData;
- };
- VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
- blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
- memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));
-
- // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
- const size_t moveCount = moves.size();
- for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
- {
- const VmaDefragmentationMove& move = moves[moveIndex];
- blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
- blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
- }
-
- VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
-
- // Go over all blocks. Get mapped pointer or map if necessary.
- for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
- {
- BlockInfo& currBlockInfo = blockInfo[blockIndex];
- VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
- {
- currBlockInfo.pMappedData = pBlock->GetMappedData();
- // It is not originally mapped - map it.
- if(currBlockInfo.pMappedData == VMA_NULL)
- {
- pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
- if(pDefragCtx->res == VK_SUCCESS)
- {
- currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
- }
- }
- }
- }
-
- // Go over all moves. Do actual data transfer.
- if(pDefragCtx->res == VK_SUCCESS)
- {
- const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
- VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
-
- for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
- {
- const VmaDefragmentationMove& move = moves[moveIndex];
-
- const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
- const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
-
- VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
-
- // Invalidate source.
- if(isNonCoherent)
- {
- VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
- memRange.memory = pSrcBlock->GetDeviceMemory();
- memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
- memRange.size = VMA_MIN(
- VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
- pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
- (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
- }
-
- // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
- memmove(
- reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
- reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
- static_cast<size_t>(move.size));
-
- if(IsCorruptionDetectionEnabled())
- {
- VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
- VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
- }
-
- // Flush destination.
- if(isNonCoherent)
- {
- VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
- memRange.memory = pDstBlock->GetDeviceMemory();
- memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
- memRange.size = VMA_MIN(
- VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
- pDstBlock->m_pMetadata->GetSize() - memRange.offset);
- (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
- }
- }
- }
-
- // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
- // Regardless of pCtx->res == VK_SUCCESS.
- for(size_t blockIndex = blockCount; blockIndex--; )
- {
- const BlockInfo& currBlockInfo = blockInfo[blockIndex];
- if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
- {
- VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- pBlock->Unmap(m_hAllocator, 1);
- }
- }
+ class VmaBlockVectorDefragmentationContext* pDefragCtx,
+ const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
+{
+ const size_t blockCount = m_Blocks.size();
+ const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
+
+ enum BLOCK_FLAG
+ {
+ BLOCK_FLAG_USED = 0x00000001,
+ BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
+ };
+
+ struct BlockInfo
+ {
+ uint32_t flags;
+ void* pMappedData;
+ };
+ VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
+ blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
+ memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));
+
+ // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
+ const size_t moveCount = moves.size();
+ for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+ {
+ const VmaDefragmentationMove& move = moves[moveIndex];
+ blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
+ blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
+ }
+
+ VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
+
+ // Go over all blocks. Get mapped pointer or map if necessary.
+ for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
+ {
+ BlockInfo& currBlockInfo = blockInfo[blockIndex];
+ VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+ if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
+ {
+ currBlockInfo.pMappedData = pBlock->GetMappedData();
+ // It is not originally mapped - map it.
+ if(currBlockInfo.pMappedData == VMA_NULL)
+ {
+ pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
+ if(pDefragCtx->res == VK_SUCCESS)
+ {
+ currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
+ }
+ }
+ }
+ }
+
+ // Go over all moves. Do actual data transfer.
+ if(pDefragCtx->res == VK_SUCCESS)
+ {
+ const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
+ VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
+
+ for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+ {
+ const VmaDefragmentationMove& move = moves[moveIndex];
+
+ const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
+ const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
+
+ VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
+
+ // Invalidate source.
+ if(isNonCoherent)
+ {
+ VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
+ memRange.memory = pSrcBlock->GetDeviceMemory();
+ memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
+ memRange.size = VMA_MIN(
+ VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
+ pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
+ (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
+ }
+
+ // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
+ memmove(
+ reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
+ reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
+ static_cast<size_t>(move.size));
+
+ if(IsCorruptionDetectionEnabled())
+ {
+ VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
+ VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
+ }
+
+ // Flush destination.
+ if(isNonCoherent)
+ {
+ VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
+ memRange.memory = pDstBlock->GetDeviceMemory();
+ memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
+ memRange.size = VMA_MIN(
+ VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
+ pDstBlock->m_pMetadata->GetSize() - memRange.offset);
+ (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
+ }
+ }
+ }
+
+ // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
+    // This runs regardless of whether pDefragCtx->res == VK_SUCCESS.
+ for(size_t blockIndex = blockCount; blockIndex--; )
+ {
+ const BlockInfo& currBlockInfo = blockInfo[blockIndex];
+ if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
+ {
+ VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+ pBlock->Unmap(m_hAllocator, 1);
+ }
+ }
}
void VmaBlockVector::ApplyDefragmentationMovesGpu(
- class VmaBlockVectorDefragmentationContext* pDefragCtx,
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkCommandBuffer commandBuffer)
-{
- const size_t blockCount = m_Blocks.size();
-
- pDefragCtx->blockContexts.resize(blockCount);
- memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));
-
- // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
- const size_t moveCount = moves.size();
- for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
- {
- const VmaDefragmentationMove& move = moves[moveIndex];
-
- //if(move.type == VMA_ALLOCATION_TYPE_UNKNOWN)
- {
- // Old school move still require us to map the whole block
- pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
- pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
- }
- }
-
- VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
-
- // Go over all blocks. Create and bind buffer for whole block if necessary.
- {
- VkBufferCreateInfo bufCreateInfo;
- VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
-
- for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
- {
- VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
- VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
- {
- bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
- pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
- m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
- if(pDefragCtx->res == VK_SUCCESS)
- {
- pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
- m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
- }
- }
- }
- }
-
- // Go over all moves. Post data transfer commands to command buffer.
- if(pDefragCtx->res == VK_SUCCESS)
- {
- for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
- {
- const VmaDefragmentationMove& move = moves[moveIndex];
-
- const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
- const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
-
- VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
-
- VkBufferCopy region = {
- move.srcOffset,
- move.dstOffset,
- move.size };
- (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
- commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
- }
- }
-
- // Save buffers to defrag context for later destruction.
- if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
- {
- pDefragCtx->res = VK_NOT_READY;
- }
+ class VmaBlockVectorDefragmentationContext* pDefragCtx,
+ const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkCommandBuffer commandBuffer)
+{
+ const size_t blockCount = m_Blocks.size();
+
+ pDefragCtx->blockContexts.resize(blockCount);
+ memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));
+
+ // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
+ const size_t moveCount = moves.size();
+ for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+ {
+ const VmaDefragmentationMove& move = moves[moveIndex];
+ pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
+ pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
+ }
+
+ VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
+
+ // Go over all blocks. Create and bind buffer for whole block if necessary.
+ {
+ VkBufferCreateInfo bufCreateInfo;
+ VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
+
+ for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
+ {
+ VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
+ VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+ if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
+ {
+ bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
+ pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
+ m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
+ if(pDefragCtx->res == VK_SUCCESS)
+ {
+ pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
+ m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
+ }
+ }
+ }
+ }
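+    // Each buffer spans its whole block and is bound at offset 0, so a single
+    // vkCmdCopyBuffer below can address any region of the block via VkBufferCopy offsets.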
+
+ // Go over all moves. Post data transfer commands to command buffer.
+ if(pDefragCtx->res == VK_SUCCESS)
+ {
+ for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
+ {
+ const VmaDefragmentationMove& move = moves[moveIndex];
+
+ const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
+ const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
+
+ VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
+
+ VkBufferCopy region = {
+ move.srcOffset,
+ move.dstOffset,
+ move.size };
+ (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
+ commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
+ }
+ }
+
+ // Save buffers to defrag context for later destruction.
+ if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
+ {
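+        // VK_NOT_READY tells the caller that the copy commands recorded above still
+        // have to execute; the temporary buffers are destroyed in DefragmentationEnd().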
+ pDefragCtx->res = VK_NOT_READY;
+ }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
- for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
- {
- VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
- if(pBlock->m_pMetadata->IsEmpty())
- {
- if(m_Blocks.size() > m_MinBlockCount)
- {
- if(pDefragmentationStats != VMA_NULL)
- {
- ++pDefragmentationStats->deviceMemoryBlocksFreed;
- pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
- }
-
- VmaVectorRemove(m_Blocks, blockIndex);
- pBlock->Destroy(m_hAllocator);
- vma_delete(m_hAllocator, pBlock);
- }
- else
- {
- break;
- }
- }
- }
- UpdateHasEmptyBlock();
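+    // Iterate in reverse so that VmaVectorRemove() does not shift blocks that have
+    // not been visited yet.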
+ for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
+ {
+ VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
+ if(pBlock->m_pMetadata->IsEmpty())
+ {
+ if(m_Blocks.size() > m_MinBlockCount)
+ {
+ if(pDefragmentationStats != VMA_NULL)
+ {
+ ++pDefragmentationStats->deviceMemoryBlocksFreed;
+ pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
+ }
+
+ VmaVectorRemove(m_Blocks, blockIndex);
+ pBlock->Destroy(m_hAllocator);
+ vma_delete(m_hAllocator, pBlock);
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+ UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
- m_HasEmptyBlock = false;
- for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
- if(pBlock->m_pMetadata->IsEmpty())
- {
- m_HasEmptyBlock = true;
- break;
- }
- }
+ m_HasEmptyBlock = false;
+ for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
+ {
+ VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
+ if(pBlock->m_pMetadata->IsEmpty())
+ {
+ m_HasEmptyBlock = true;
+ break;
+ }
+ }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
-
- json.BeginObject();
-
- if(IsCustomPool())
- {
- const char* poolName = m_hParentPool->GetName();
- if(poolName != VMA_NULL && poolName[0] != '\0')
- {
- json.WriteString("Name");
- json.WriteString(poolName);
- }
-
- json.WriteString("MemoryTypeIndex");
- json.WriteNumber(m_MemoryTypeIndex);
-
- json.WriteString("BlockSize");
- json.WriteNumber(m_PreferredBlockSize);
-
- json.WriteString("BlockCount");
- json.BeginObject(true);
- if(m_MinBlockCount > 0)
- {
- json.WriteString("Min");
- json.WriteNumber((uint64_t)m_MinBlockCount);
- }
- if(m_MaxBlockCount < SIZE_MAX)
- {
- json.WriteString("Max");
- json.WriteNumber((uint64_t)m_MaxBlockCount);
- }
- json.WriteString("Cur");
- json.WriteNumber((uint64_t)m_Blocks.size());
- json.EndObject();
-
- if(m_FrameInUseCount > 0)
- {
- json.WriteString("FrameInUseCount");
- json.WriteNumber(m_FrameInUseCount);
- }
-
- if(m_Algorithm != 0)
- {
- json.WriteString("Algorithm");
- json.WriteString(VmaAlgorithmToStr(m_Algorithm));
- }
- }
- else
- {
- json.WriteString("PreferredBlockSize");
- json.WriteNumber(m_PreferredBlockSize);
- }
-
- json.WriteString("Blocks");
- json.BeginObject();
- for(size_t i = 0; i < m_Blocks.size(); ++i)
- {
- json.BeginString();
- json.ContinueString(m_Blocks[i]->GetId());
- json.EndString();
-
- m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
- }
- json.EndObject();
-
- json.EndObject();
+ VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+
+ json.BeginObject();
+
+ if(IsCustomPool())
+ {
+ const char* poolName = m_hParentPool->GetName();
+ if(poolName != VMA_NULL && poolName[0] != '\0')
+ {
+ json.WriteString("Name");
+ json.WriteString(poolName);
+ }
+
+ json.WriteString("MemoryTypeIndex");
+ json.WriteNumber(m_MemoryTypeIndex);
+
+ json.WriteString("BlockSize");
+ json.WriteNumber(m_PreferredBlockSize);
+
+ json.WriteString("BlockCount");
+ json.BeginObject(true);
+ if(m_MinBlockCount > 0)
+ {
+ json.WriteString("Min");
+ json.WriteNumber((uint64_t)m_MinBlockCount);
+ }
+ if(m_MaxBlockCount < SIZE_MAX)
+ {
+ json.WriteString("Max");
+ json.WriteNumber((uint64_t)m_MaxBlockCount);
+ }
+ json.WriteString("Cur");
+ json.WriteNumber((uint64_t)m_Blocks.size());
+ json.EndObject();
+
+ if(m_FrameInUseCount > 0)
+ {
+ json.WriteString("FrameInUseCount");
+ json.WriteNumber(m_FrameInUseCount);
+ }
+
+ if(m_Algorithm != 0)
+ {
+ json.WriteString("Algorithm");
+ json.WriteString(VmaAlgorithmToStr(m_Algorithm));
+ }
+ }
+ else
+ {
+ json.WriteString("PreferredBlockSize");
+ json.WriteNumber(m_PreferredBlockSize);
+ }
+
+ json.WriteString("Blocks");
+ json.BeginObject();
+ for(size_t i = 0; i < m_Blocks.size(); ++i)
+ {
+ json.BeginString();
+ json.ContinueString(m_Blocks[i]->GetId());
+ json.EndString();
+
+ m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
+ }
+ json.EndObject();
+
+ json.EndObject();
}
#endif // #if VMA_STATS_STRING_ENABLED
void VmaBlockVector::Defragment(
- class VmaBlockVectorDefragmentationContext* pCtx,
- VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
- VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
- VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
- VkCommandBuffer commandBuffer)
-{
- pCtx->res = VK_SUCCESS;
-
- const VkMemoryPropertyFlags memPropFlags =
- m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
- const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
-
- const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
- isHostVisible;
- const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
- !IsCorruptionDetectionEnabled() &&
- ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
-
- // There are options to defragment this memory type.
- if(canDefragmentOnCpu || canDefragmentOnGpu)
- {
- bool defragmentOnGpu;
- // There is only one option to defragment this memory type.
- if(canDefragmentOnGpu != canDefragmentOnCpu)
- {
- defragmentOnGpu = canDefragmentOnGpu;
- }
- // Both options are available: Heuristics to choose the best one.
- else
- {
- defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
- m_hAllocator->IsIntegratedGpu();
- }
-
- bool overlappingMoveSupported = !defragmentOnGpu;
-
- if(m_hAllocator->m_UseMutex)
- {
- if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
- {
- if(!m_Mutex.TryLockWrite())
- {
- pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
- return;
- }
- }
- else
- {
- m_Mutex.LockWrite();
- pCtx->mutexLocked = true;
- }
- }
-
- pCtx->Begin(overlappingMoveSupported, flags);
-
- // Defragment.
-
- const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
- const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
- pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);
-
- // Accumulate statistics.
- if(pStats != VMA_NULL)
- {
- const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
- const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
- pStats->bytesMoved += bytesMoved;
- pStats->allocationsMoved += allocationsMoved;
- VMA_ASSERT(bytesMoved <= maxBytesToMove);
- VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
- if(defragmentOnGpu)
- {
- maxGpuBytesToMove -= bytesMoved;
- maxGpuAllocationsToMove -= allocationsMoved;
- }
- else
- {
- maxCpuBytesToMove -= bytesMoved;
- maxCpuAllocationsToMove -= allocationsMoved;
- }
- }
-
- if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
- {
- if(m_hAllocator->m_UseMutex)
- m_Mutex.UnlockWrite();
-
- if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
- pCtx->res = VK_NOT_READY;
-
- return;
- }
-
- if(pCtx->res >= VK_SUCCESS)
- {
- if(defragmentOnGpu)
- {
- ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
- }
- else
- {
- ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
- }
- }
- }
+ class VmaBlockVectorDefragmentationContext* pCtx,
+ VmaDefragmentationStats* pStats,
+ VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
+ VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
+ VkCommandBuffer commandBuffer)
+{
+ pCtx->res = VK_SUCCESS;
+
+ const VkMemoryPropertyFlags memPropFlags =
+ m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
+ const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
+
+ const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
+ isHostVisible;
+ const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
+ !IsCorruptionDetectionEnabled() &&
+ ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
+
+ // There are options to defragment this memory type.
+ if(canDefragmentOnCpu || canDefragmentOnGpu)
+ {
+ bool defragmentOnGpu;
+ // There is only one option to defragment this memory type.
+ if(canDefragmentOnGpu != canDefragmentOnCpu)
+ {
+ defragmentOnGpu = canDefragmentOnGpu;
+ }
+        // Both options are available: use a heuristic to choose the better one.
+ else
+ {
+ defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
+ m_hAllocator->IsIntegratedGpu();
+ }
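+        // Heuristic rationale: DEVICE_LOCAL memory is usually slow to access through
+        // a CPU mapping, so the GPU copy path is preferred when both are possible.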
+
+ bool overlappingMoveSupported = !defragmentOnGpu;
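+        // The CPU path copies with memmove, which tolerates overlapping ranges;
+        // vkCmdCopyBuffer does not, so overlapping moves are disabled on the GPU path.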
+
+ if(m_hAllocator->m_UseMutex)
+ {
+ m_Mutex.LockWrite();
+ pCtx->mutexLocked = true;
+ }
+
+ pCtx->Begin(overlappingMoveSupported);
+
+ // Defragment.
+
+ const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
+ const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
+ pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
+
+ // Accumulate statistics.
+ if(pStats != VMA_NULL)
+ {
+ const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
+ const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
+ pStats->bytesMoved += bytesMoved;
+ pStats->allocationsMoved += allocationsMoved;
+ VMA_ASSERT(bytesMoved <= maxBytesToMove);
+ VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
+ if(defragmentOnGpu)
+ {
+ maxGpuBytesToMove -= bytesMoved;
+ maxGpuAllocationsToMove -= allocationsMoved;
+ }
+ else
+ {
+ maxCpuBytesToMove -= bytesMoved;
+ maxCpuAllocationsToMove -= allocationsMoved;
+ }
+ }
+
+ if(pCtx->res >= VK_SUCCESS)
+ {
+ if(defragmentOnGpu)
+ {
+ ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
+ }
+ else
+ {
+ ApplyDefragmentationMovesCpu(pCtx, moves);
+ }
+ }
+ }
}
void VmaBlockVector::DefragmentationEnd(
- class VmaBlockVectorDefragmentationContext* pCtx,
- VmaDefragmentationStats* pStats)
-{
- // Destroy buffers.
- for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
- {
- VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
- if(blockCtx.hBuffer)
- {
- (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
- m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
- }
- }
-
- if(pCtx->res >= VK_SUCCESS)
- {
- FreeEmptyBlocks(pStats);
- }
-
- if(pCtx->mutexLocked)
- {
- VMA_ASSERT(m_hAllocator->m_UseMutex);
- m_Mutex.UnlockWrite();
- }
-}
-
-uint32_t VmaBlockVector::ProcessDefragmentations(
- class VmaBlockVectorDefragmentationContext *pCtx,
- VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
-{
- VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-
- const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);
-
- for(uint32_t i = 0; i < moveCount; ++ i)
- {
- VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];
-
- pMove->allocation = move.hAllocation;
- pMove->memory = move.pDstBlock->GetDeviceMemory();
- pMove->offset = move.dstOffset;
-
- ++ pMove;
- }
-
- pCtx->defragmentationMovesProcessed += moveCount;
-
- return moveCount;
-}
-
-void VmaBlockVector::CommitDefragmentations(
- class VmaBlockVectorDefragmentationContext *pCtx,
- VmaDefragmentationStats* pStats)
-{
- VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
-
- for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
- {
- const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];
-
- move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
- move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
- }
-
- pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
- FreeEmptyBlocks(pStats);
+ class VmaBlockVectorDefragmentationContext* pCtx,
+ VmaDefragmentationStats* pStats)
+{
+ // Destroy buffers.
+ for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
+ {
+ VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
+ if(blockCtx.hBuffer)
+ {
+ (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
+ m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
+ }
+ }
+
+ if(pCtx->res >= VK_SUCCESS)
+ {
+ FreeEmptyBlocks(pStats);
+ }
+
+ if(pCtx->mutexLocked)
+ {
+ VMA_ASSERT(m_hAllocator->m_UseMutex);
+ m_Mutex.UnlockWrite();
+ }
}
size_t VmaBlockVector::CalcAllocationCount() const
{
- size_t result = 0;
- for(size_t i = 0; i < m_Blocks.size(); ++i)
- {
- result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
- }
- return result;
+ size_t result = 0;
+ for(size_t i = 0; i < m_Blocks.size(); ++i)
+ {
+ result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
+ }
+ return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
- if(m_BufferImageGranularity == 1)
- {
- return false;
- }
- VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
- for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
- VMA_ASSERT(m_Algorithm == 0);
- VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
- if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
- {
- return true;
- }
- }
- return false;
+ if(m_BufferImageGranularity == 1)
+ {
+ return false;
+ }
+ VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
+ for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
+ {
+ VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
+ VMA_ASSERT(m_Algorithm == 0);
+ VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
+ if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
+ {
+ return true;
+ }
+ }
+ return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
- uint32_t currentFrameIndex,
- size_t* pLostAllocationCount)
-{
- VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
- size_t lostAllocationCount = 0;
- for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pBlock);
- lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
- }
- if(pLostAllocationCount != VMA_NULL)
- {
- *pLostAllocationCount = lostAllocationCount;
- }
+ uint32_t currentFrameIndex,
+ size_t* pLostAllocationCount)
+{
+ VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
+ size_t lostAllocationCount = 0;
+ for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+ {
+ VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+ VMA_ASSERT(pBlock);
+ lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
+ }
+ if(pLostAllocationCount != VMA_NULL)
+ {
+ *pLostAllocationCount = lostAllocationCount;
+ }
}
VkResult VmaBlockVector::CheckCorruption()
{
- if(!IsCorruptionDetectionEnabled())
- {
- return VK_ERROR_FEATURE_NOT_PRESENT;
- }
-
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
- for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
- {
- VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pBlock);
- VkResult res = pBlock->CheckCorruption(m_hAllocator);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- }
- return VK_SUCCESS;
+ if(!IsCorruptionDetectionEnabled())
+ {
+ return VK_ERROR_FEATURE_NOT_PRESENT;
+ }
+
+ VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+ for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+ {
+ VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+ VMA_ASSERT(pBlock);
+ VkResult res = pBlock->CheckCorruption(m_hAllocator);
+ if(res != VK_SUCCESS)
+ {
+ return res;
+ }
+ }
+ return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
- const uint32_t memTypeIndex = m_MemoryTypeIndex;
- const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
+ const uint32_t memTypeIndex = m_MemoryTypeIndex;
+ const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
- VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
+ VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
- for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
- {
- const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
- VMA_ASSERT(pBlock);
- VMA_HEAVY_ASSERT(pBlock->Validate());
- VmaStatInfo allocationStatInfo;
- pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
- VmaAddStatInfo(pStats->total, allocationStatInfo);
- VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
- VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
- }
+ for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
+ {
+ const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
+ VMA_ASSERT(pBlock);
+ VMA_HEAVY_ASSERT(pBlock->Validate());
+ VmaStatInfo allocationStatInfo;
+ pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
+ VmaAddStatInfo(pStats->total, allocationStatInfo);
+ VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
+ VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
+ }
}
////////////////////////////////////////////////////////////////////////////////
// VmaDefragmentationAlgorithm_Generic members definition
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex,
- bool overlappingMoveSupported) :
- VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
- m_AllocationCount(0),
- m_AllAllocations(false),
- m_BytesMoved(0),
- m_AllocationsMoved(0),
- m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
-{
- // Create block info for each block.
- const size_t blockCount = m_pBlockVector->m_Blocks.size();
- for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
- pBlockInfo->m_OriginalBlockIndex = blockIndex;
- pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
- m_Blocks.push_back(pBlockInfo);
- }
-
- // Sort them by m_pBlock pointer value.
- VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
+ VmaAllocator hAllocator,
+ VmaBlockVector* pBlockVector,
+ uint32_t currentFrameIndex,
+ bool overlappingMoveSupported) :
+ VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
+ m_AllocationCount(0),
+ m_AllAllocations(false),
+ m_BytesMoved(0),
+ m_AllocationsMoved(0),
+ m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
+{
+ // Create block info for each block.
+ const size_t blockCount = m_pBlockVector->m_Blocks.size();
+ for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+ {
+ BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
+ pBlockInfo->m_OriginalBlockIndex = blockIndex;
+ pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
+ m_Blocks.push_back(pBlockInfo);
+ }
+
+ // Sort them by m_pBlock pointer value.
+ VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
- for(size_t i = m_Blocks.size(); i--; )
- {
- vma_delete(m_hAllocator, m_Blocks[i]);
- }
+ for(size_t i = m_Blocks.size(); i--; )
+ {
+ vma_delete(m_hAllocator, m_Blocks[i]);
+ }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
- // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
- if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
- {
- VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
- BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
- if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
- {
- AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
- (*it)->m_Allocations.push_back(allocInfo);
- }
- else
- {
- VMA_ASSERT(0);
- }
-
- ++m_AllocationCount;
- }
+    // Now that we are inside VmaBlockVector::m_Mutex, we can make a final check that this allocation was not lost.
+ if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
+ {
+ VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
+ BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
+ if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
+ {
+ AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
+ (*it)->m_Allocations.push_back(allocInfo);
+ }
+ else
+ {
+ VMA_ASSERT(0);
+ }
+
+ ++m_AllocationCount;
+ }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove,
- bool freeOldAllocations)
-{
- if(m_Blocks.empty())
- {
- return VK_SUCCESS;
- }
-
- // This is a choice based on research.
- // Option 1:
- uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;
- // Option 2:
- //uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT;
- // Option 3:
- //uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT;
-
- size_t srcBlockMinIndex = 0;
- // When FAST_ALGORITHM, move allocations from only last out of blocks that contain non-movable allocations.
- /*
- if(m_AlgorithmFlags & VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT)
- {
- const size_t blocksWithNonMovableCount = CalcBlocksWithNonMovableCount();
- if(blocksWithNonMovableCount > 0)
- {
- srcBlockMinIndex = blocksWithNonMovableCount - 1;
- }
- }
- */
-
- size_t srcBlockIndex = m_Blocks.size() - 1;
- size_t srcAllocIndex = SIZE_MAX;
- for(;;)
- {
- // 1. Find next allocation to move.
- // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
- // 1.2. Then start from last to first m_Allocations.
- while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
- {
- if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
- {
- // Finished: no more allocations to process.
- if(srcBlockIndex == srcBlockMinIndex)
- {
- return VK_SUCCESS;
- }
- else
- {
- --srcBlockIndex;
- srcAllocIndex = SIZE_MAX;
- }
- }
- else
- {
- srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
- }
- }
-
- BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
- AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
-
- const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
- const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
- const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
- const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
-
- // 2. Try to find new place for this allocation in preceding or current block.
- for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
- {
- BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
- VmaAllocationRequest dstAllocRequest;
- if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
- m_CurrentFrameIndex,
- m_pBlockVector->GetFrameInUseCount(),
- m_pBlockVector->GetBufferImageGranularity(),
- size,
- alignment,
- false, // upperAddress
- suballocType,
- false, // canMakeOtherLost
- strategy,
- &dstAllocRequest) &&
- MoveMakesSense(
- dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
- {
- VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
-
- // Reached limit on number of allocations or bytes to move.
- if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
- (m_BytesMoved + size > maxBytesToMove))
- {
- return VK_SUCCESS;
- }
-
- VmaDefragmentationMove move = {};
- move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
- move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
- move.srcOffset = srcOffset;
- move.dstOffset = dstAllocRequest.offset;
- move.size = size;
- move.hAllocation = allocInfo.m_hAllocation;
- move.pSrcBlock = pSrcBlockInfo->m_pBlock;
- move.pDstBlock = pDstBlockInfo->m_pBlock;
-
- moves.push_back(move);
-
- pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
- dstAllocRequest,
- suballocType,
- size,
- allocInfo.m_hAllocation);
-
- if(freeOldAllocations)
- {
- pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
- allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
- }
-
- if(allocInfo.m_pChanged != VMA_NULL)
- {
- *allocInfo.m_pChanged = VK_TRUE;
- }
-
- ++m_AllocationsMoved;
- m_BytesMoved += size;
-
- VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
-
- break;
- }
- }
-
- // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.
-
- if(srcAllocIndex > 0)
- {
- --srcAllocIndex;
- }
- else
- {
- if(srcBlockIndex > 0)
- {
- --srcBlockIndex;
- srcAllocIndex = SIZE_MAX;
- }
- else
- {
- return VK_SUCCESS;
- }
- }
- }
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkDeviceSize maxBytesToMove,
+ uint32_t maxAllocationsToMove)
+{
+ if(m_Blocks.empty())
+ {
+ return VK_SUCCESS;
+ }
+
+ // This is a choice based on research.
+ // Option 1:
+ uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;
+ // Option 2:
+ //uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT;
+ // Option 3:
+ //uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_FRAGMENTATION_BIT;
+
+ size_t srcBlockMinIndex = 0;
+    // With FAST_ALGORITHM, move allocations only out of the last of the blocks that contain non-movable allocations.
+ /*
+ if(m_AlgorithmFlags & VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT)
+ {
+ const size_t blocksWithNonMovableCount = CalcBlocksWithNonMovableCount();
+ if(blocksWithNonMovableCount > 0)
+ {
+ srcBlockMinIndex = blocksWithNonMovableCount - 1;
+ }
+ }
+ */
+
+ size_t srcBlockIndex = m_Blocks.size() - 1;
+ size_t srcAllocIndex = SIZE_MAX;
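+    // SIZE_MAX is a sentinel: the while-loop below sees it as out of range and
+    // resets it to the last allocation of the current source block.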
+ for(;;)
+ {
+        // 1. Find the next allocation to move.
+        // 1.1. Walk m_Blocks from last to first - they are sorted from most "destination" to most "source".
+        // 1.2. Within each block, walk m_Allocations from last to first.
+ while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
+ {
+ if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
+ {
+ // Finished: no more allocations to process.
+ if(srcBlockIndex == srcBlockMinIndex)
+ {
+ return VK_SUCCESS;
+ }
+ else
+ {
+ --srcBlockIndex;
+ srcAllocIndex = SIZE_MAX;
+ }
+ }
+ else
+ {
+ srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
+ }
+ }
+
+ BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
+ AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
+
+ const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
+ const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
+ const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
+ const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
+
+ // 2. Try to find new place for this allocation in preceding or current block.
+ for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
+ {
+ BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
+ VmaAllocationRequest dstAllocRequest;
+ if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
+ m_CurrentFrameIndex,
+ m_pBlockVector->GetFrameInUseCount(),
+ m_pBlockVector->GetBufferImageGranularity(),
+ size,
+ alignment,
+ false, // upperAddress
+ suballocType,
+ false, // canMakeOtherLost
+ strategy,
+ &dstAllocRequest) &&
+ MoveMakesSense(
+ dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
+ {
+ VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
+
+ // Reached limit on number of allocations or bytes to move.
+ if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
+ (m_BytesMoved + size > maxBytesToMove))
+ {
+ return VK_SUCCESS;
+ }
+
+ VmaDefragmentationMove move;
+ move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
+ move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
+ move.srcOffset = srcOffset;
+ move.dstOffset = dstAllocRequest.offset;
+ move.size = size;
+ moves.push_back(move);
+
+ pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
+ dstAllocRequest,
+ suballocType,
+ size,
+ allocInfo.m_hAllocation);
+ pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
+
+ allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
+
+ if(allocInfo.m_pChanged != VMA_NULL)
+ {
+ *allocInfo.m_pChanged = VK_TRUE;
+ }
+
+ ++m_AllocationsMoved;
+ m_BytesMoved += size;
+
+ VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
+
+ break;
+ }
+ }
+
+        // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for the next round.
+
+ if(srcAllocIndex > 0)
+ {
+ --srcAllocIndex;
+ }
+ else
+ {
+ if(srcBlockIndex > 0)
+ {
+ --srcBlockIndex;
+ srcAllocIndex = SIZE_MAX;
+ }
+ else
+ {
+ return VK_SUCCESS;
+ }
+ }
+ }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
- size_t result = 0;
- for(size_t i = 0; i < m_Blocks.size(); ++i)
- {
- if(m_Blocks[i]->m_HasNonMovableAllocations)
- {
- ++result;
- }
- }
- return result;
+ size_t result = 0;
+ for(size_t i = 0; i < m_Blocks.size(); ++i)
+ {
+ if(m_Blocks[i]->m_HasNonMovableAllocations)
+ {
+ ++result;
+ }
+ }
+ return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove,
- VmaDefragmentationFlags flags)
-{
- if(!m_AllAllocations && m_AllocationCount == 0)
- {
- return VK_SUCCESS;
- }
-
- const size_t blockCount = m_Blocks.size();
- for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- BlockInfo* pBlockInfo = m_Blocks[blockIndex];
-
- if(m_AllAllocations)
- {
- VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
- for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
- it != pMetadata->m_Suballocations.end();
- ++it)
- {
- if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
- {
- AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
- pBlockInfo->m_Allocations.push_back(allocInfo);
- }
- }
- }
-
- pBlockInfo->CalcHasNonMovableAllocations();
-
- // This is a choice based on research.
- // Option 1:
- pBlockInfo->SortAllocationsByOffsetDescending();
- // Option 2:
- //pBlockInfo->SortAllocationsBySizeDescending();
- }
-
- // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
- VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
-
- // This is a choice based on research.
- const uint32_t roundCount = 2;
-
- // Execute defragmentation rounds (the main part).
- VkResult result = VK_SUCCESS;
- for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
- {
- result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
- }
-
- return result;
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkDeviceSize maxBytesToMove,
+ uint32_t maxAllocationsToMove)
+{
+ if(!m_AllAllocations && m_AllocationCount == 0)
+ {
+ return VK_SUCCESS;
+ }
+
+ const size_t blockCount = m_Blocks.size();
+ for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+ {
+ BlockInfo* pBlockInfo = m_Blocks[blockIndex];
+
+ if(m_AllAllocations)
+ {
+ VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
+ for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
+ it != pMetadata->m_Suballocations.end();
+ ++it)
+ {
+ if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
+ pBlockInfo->m_Allocations.push_back(allocInfo);
+ }
+ }
+ }
+
+ pBlockInfo->CalcHasNonMovableAllocations();
+
+ // This is a choice based on research.
+ // Option 1:
+ pBlockInfo->SortAllocationsByOffsetDescending();
+ // Option 2:
+ //pBlockInfo->SortAllocationsBySizeDescending();
+ }
+
+    // Sort m_Blocks this time by the main criterion, from most "destination" to most "source" blocks.
+ VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
+
+ // This is a choice based on research.
+ const uint32_t roundCount = 2;
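+    // Presumably, a second round lets space freed by the first round's moves be
+    // reused by allocations that could not be placed earlier.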
+
+ // Execute defragmentation rounds (the main part).
+ VkResult result = VK_SUCCESS;
+ for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
+ {
+ result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
+ }
+
+ return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
- size_t dstBlockIndex, VkDeviceSize dstOffset,
- size_t srcBlockIndex, VkDeviceSize srcOffset)
-{
- if(dstBlockIndex < srcBlockIndex)
- {
- return true;
- }
- if(dstBlockIndex > srcBlockIndex)
- {
- return false;
- }
- if(dstOffset < srcOffset)
- {
- return true;
- }
- return false;
+ size_t dstBlockIndex, VkDeviceSize dstOffset,
+ size_t srcBlockIndex, VkDeviceSize srcOffset)
+{
+ if(dstBlockIndex < srcBlockIndex)
+ {
+ return true;
+ }
+ if(dstBlockIndex > srcBlockIndex)
+ {
+ return false;
+ }
+ if(dstOffset < srcOffset)
+ {
+ return true;
+ }
+ return false;
}
////////////////////////////////////////////////////////////////////////////////
// VmaDefragmentationAlgorithm_Fast
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
- VmaAllocator hAllocator,
- VmaBlockVector* pBlockVector,
- uint32_t currentFrameIndex,
- bool overlappingMoveSupported) :
- VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
- m_OverlappingMoveSupported(overlappingMoveSupported),
- m_AllocationCount(0),
- m_AllAllocations(false),
- m_BytesMoved(0),
- m_AllocationsMoved(0),
- m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
+ VmaAllocator hAllocator,
+ VmaBlockVector* pBlockVector,
+ uint32_t currentFrameIndex,
+ bool overlappingMoveSupported) :
+ VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
+ m_OverlappingMoveSupported(overlappingMoveSupported),
+ m_AllocationCount(0),
+ m_AllAllocations(false),
+ m_BytesMoved(0),
+ m_AllocationsMoved(0),
+ m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
- VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
+ VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}
@@ -13558,782 +13322,622 @@ VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
- VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
- VkDeviceSize maxBytesToMove,
- uint32_t maxAllocationsToMove,
- VmaDefragmentationFlags flags)
-{
- VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
-
- const size_t blockCount = m_pBlockVector->GetBlockCount();
- if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
- {
- return VK_SUCCESS;
- }
-
- PreprocessMetadata();
-
- // Sort blocks in order from most destination.
-
- m_BlockInfos.resize(blockCount);
- for(size_t i = 0; i < blockCount; ++i)
- {
- m_BlockInfos[i].origBlockIndex = i;
- }
-
- VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
- return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
- m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
- });
-
- // THE MAIN ALGORITHM
-
- FreeSpaceDatabase freeSpaceDb;
-
- size_t dstBlockInfoIndex = 0;
- size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
- VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
- VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
- VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
- VkDeviceSize dstOffset = 0;
-
- bool end = false;
- for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
- {
- const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
- VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
- VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
- for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
- !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
- {
- VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
- const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
- const VkDeviceSize srcAllocSize = srcSuballocIt->size;
- if(m_AllocationsMoved == maxAllocationsToMove ||
- m_BytesMoved + srcAllocSize > maxBytesToMove)
- {
- end = true;
- break;
- }
- const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
-
- VmaDefragmentationMove move = {};
- // Try to place it in one of free spaces from the database.
- size_t freeSpaceInfoIndex;
- VkDeviceSize dstAllocOffset;
- if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
- freeSpaceInfoIndex, dstAllocOffset))
- {
- size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
- VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
- VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
-
- // Same block
- if(freeSpaceInfoIndex == srcBlockInfoIndex)
- {
- VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
-
- // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
-
- VmaSuballocation suballoc = *srcSuballocIt;
- suballoc.offset = dstAllocOffset;
- suballoc.hAllocation->ChangeOffset(dstAllocOffset);
- m_BytesMoved += srcAllocSize;
- ++m_AllocationsMoved;
-
- VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
- ++nextSuballocIt;
- pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
- srcSuballocIt = nextSuballocIt;
-
- InsertSuballoc(pFreeSpaceMetadata, suballoc);
-
- move.srcBlockIndex = srcOrigBlockIndex;
- move.dstBlockIndex = freeSpaceOrigBlockIndex;
- move.srcOffset = srcAllocOffset;
- move.dstOffset = dstAllocOffset;
- move.size = srcAllocSize;
-
- moves.push_back(move);
- }
- // Different block
- else
- {
- // MOVE OPTION 2: Move the allocation to a different block.
-
- VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
-
- VmaSuballocation suballoc = *srcSuballocIt;
- suballoc.offset = dstAllocOffset;
- suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
- m_BytesMoved += srcAllocSize;
- ++m_AllocationsMoved;
-
- VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
- ++nextSuballocIt;
- pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
- srcSuballocIt = nextSuballocIt;
-
- InsertSuballoc(pFreeSpaceMetadata, suballoc);
-
- move.srcBlockIndex = srcOrigBlockIndex;
- move.dstBlockIndex = freeSpaceOrigBlockIndex;
- move.srcOffset = srcAllocOffset;
- move.dstOffset = dstAllocOffset;
- move.size = srcAllocSize;
-
- moves.push_back(move);
- }
- }
- else
- {
- dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
-
- // If the allocation doesn't fit before the end of dstBlock, forward to next block.
- while(dstBlockInfoIndex < srcBlockInfoIndex &&
- dstAllocOffset + srcAllocSize > dstBlockSize)
- {
- // But before that, register remaining free space at the end of dst block.
- freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
-
- ++dstBlockInfoIndex;
- dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
- pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
- pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
- dstBlockSize = pDstMetadata->GetSize();
- dstOffset = 0;
- dstAllocOffset = 0;
- }
-
- // Same block
- if(dstBlockInfoIndex == srcBlockInfoIndex)
- {
- VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
-
- const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
-
- bool skipOver = overlap;
- if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
- {
- // If destination and source place overlap, skip if it would move it
- // by only < 1/64 of its size.
- skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
- }
-
- if(skipOver)
- {
- freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
-
- dstOffset = srcAllocOffset + srcAllocSize;
- ++srcSuballocIt;
- }
- // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
- else
- {
- srcSuballocIt->offset = dstAllocOffset;
- srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
- dstOffset = dstAllocOffset + srcAllocSize;
- m_BytesMoved += srcAllocSize;
- ++m_AllocationsMoved;
- ++srcSuballocIt;
-
- move.srcBlockIndex = srcOrigBlockIndex;
- move.dstBlockIndex = dstOrigBlockIndex;
- move.srcOffset = srcAllocOffset;
- move.dstOffset = dstAllocOffset;
- move.size = srcAllocSize;
-
- moves.push_back(move);
- }
- }
- // Different block
- else
- {
- // MOVE OPTION 2: Move the allocation to a different block.
-
- VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
- VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
-
- VmaSuballocation suballoc = *srcSuballocIt;
- suballoc.offset = dstAllocOffset;
- suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
- dstOffset = dstAllocOffset + srcAllocSize;
- m_BytesMoved += srcAllocSize;
- ++m_AllocationsMoved;
-
- VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
- ++nextSuballocIt;
- pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
- srcSuballocIt = nextSuballocIt;
-
- pDstMetadata->m_Suballocations.push_back(suballoc);
-
- move.srcBlockIndex = srcOrigBlockIndex;
- move.dstBlockIndex = dstOrigBlockIndex;
- move.srcOffset = srcAllocOffset;
- move.dstOffset = dstAllocOffset;
- move.size = srcAllocSize;
-
- moves.push_back(move);
- }
- }
- }
- }
-
- m_BlockInfos.clear();
-
- PostprocessMetadata();
-
- return VK_SUCCESS;
+ VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
+ VkDeviceSize maxBytesToMove,
+ uint32_t maxAllocationsToMove)
+{
+ VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
+
+ const size_t blockCount = m_pBlockVector->GetBlockCount();
+ if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
+ {
+ return VK_SUCCESS;
+ }
+
+ PreprocessMetadata();
+
+    // Sort blocks in order from most "destination" to most "source".
+
+ m_BlockInfos.resize(blockCount);
+ for(size_t i = 0; i < blockCount; ++i)
+ {
+ m_BlockInfos[i].origBlockIndex = i;
+ }
+
+ VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
+ return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
+ m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
+ });
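+    // Ascending sum of free size: the fullest blocks come first and serve as move
+    // destinations, while the emptiest blocks at the end are drained as sources.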
+
+ // THE MAIN ALGORITHM
+
+ FreeSpaceDatabase freeSpaceDb;
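+    // Tracks free ranges left behind when the linear pass skips an allocation or
+    // moves on to the next destination block, so that later, smaller allocations
+    // can still be placed into them.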
+
+ size_t dstBlockInfoIndex = 0;
+ size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
+ VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
+ VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
+ VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
+ VkDeviceSize dstOffset = 0;
+
+ bool end = false;
+ for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
+ {
+ const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
+ VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
+ VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
+ for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
+ !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
+ {
+ VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
+ const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
+ const VkDeviceSize srcAllocSize = srcSuballocIt->size;
+ if(m_AllocationsMoved == maxAllocationsToMove ||
+ m_BytesMoved + srcAllocSize > maxBytesToMove)
+ {
+ end = true;
+ break;
+ }
+ const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
+
+            // Try to place it in one of the free spaces from the database.
+ size_t freeSpaceInfoIndex;
+ VkDeviceSize dstAllocOffset;
+ if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
+ freeSpaceInfoIndex, dstAllocOffset))
+ {
+ size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
+ VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
+ VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
+
+ // Same block
+ if(freeSpaceInfoIndex == srcBlockInfoIndex)
+ {
+ VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
+
+ // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
+
+ VmaSuballocation suballoc = *srcSuballocIt;
+ suballoc.offset = dstAllocOffset;
+ suballoc.hAllocation->ChangeOffset(dstAllocOffset);
+ m_BytesMoved += srcAllocSize;
+ ++m_AllocationsMoved;
+
+ VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+ ++nextSuballocIt;
+ pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+ srcSuballocIt = nextSuballocIt;
+
+ InsertSuballoc(pFreeSpaceMetadata, suballoc);
+
+ VmaDefragmentationMove move = {
+ srcOrigBlockIndex, freeSpaceOrigBlockIndex,
+ srcAllocOffset, dstAllocOffset,
+ srcAllocSize };
+ moves.push_back(move);
+ }
+ // Different block
+ else
+ {
+ // MOVE OPTION 2: Move the allocation to a different block.
+
+ VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
+
+ VmaSuballocation suballoc = *srcSuballocIt;
+ suballoc.offset = dstAllocOffset;
+ suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
+ m_BytesMoved += srcAllocSize;
+ ++m_AllocationsMoved;
+
+ VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+ ++nextSuballocIt;
+ pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+ srcSuballocIt = nextSuballocIt;
+
+ InsertSuballoc(pFreeSpaceMetadata, suballoc);
+
+ VmaDefragmentationMove move = {
+ srcOrigBlockIndex, freeSpaceOrigBlockIndex,
+ srcAllocOffset, dstAllocOffset,
+ srcAllocSize };
+ moves.push_back(move);
+ }
+ }
+ else
+ {
+ dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
+
+                // If the allocation doesn't fit before the end of dstBlock, move on to the next block.
+ while(dstBlockInfoIndex < srcBlockInfoIndex &&
+ dstAllocOffset + srcAllocSize > dstBlockSize)
+ {
+ // But before that, register remaining free space at the end of dst block.
+ freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
+
+ ++dstBlockInfoIndex;
+ dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
+ pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
+ pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
+ dstBlockSize = pDstMetadata->GetSize();
+ dstOffset = 0;
+ dstAllocOffset = 0;
+ }
+
+ // Same block
+ if(dstBlockInfoIndex == srcBlockInfoIndex)
+ {
+ VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
+
+ const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
+
+ bool skipOver = overlap;
+ if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
+ {
+                    // If the destination and source regions overlap, skip the move if it
+                    // would shift the allocation by less than 1/64 of its size.
+ skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
+ }
+
+ if(skipOver)
+ {
+ freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
+
+ dstOffset = srcAllocOffset + srcAllocSize;
+ ++srcSuballocIt;
+ }
+ // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
+ else
+ {
+ srcSuballocIt->offset = dstAllocOffset;
+ srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
+ dstOffset = dstAllocOffset + srcAllocSize;
+ m_BytesMoved += srcAllocSize;
+ ++m_AllocationsMoved;
+ ++srcSuballocIt;
+ VmaDefragmentationMove move = {
+ srcOrigBlockIndex, dstOrigBlockIndex,
+ srcAllocOffset, dstAllocOffset,
+ srcAllocSize };
+ moves.push_back(move);
+ }
+ }
+ // Different block
+ else
+ {
+ // MOVE OPTION 2: Move the allocation to a different block.
+
+ VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
+ VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
+
+ VmaSuballocation suballoc = *srcSuballocIt;
+ suballoc.offset = dstAllocOffset;
+ suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
+ dstOffset = dstAllocOffset + srcAllocSize;
+ m_BytesMoved += srcAllocSize;
+ ++m_AllocationsMoved;
+
+ VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
+ ++nextSuballocIt;
+ pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
+ srcSuballocIt = nextSuballocIt;
+
+ pDstMetadata->m_Suballocations.push_back(suballoc);
+
+ VmaDefragmentationMove move = {
+ srcOrigBlockIndex, dstOrigBlockIndex,
+ srcAllocOffset, dstAllocOffset,
+ srcAllocSize };
+ moves.push_back(move);
+ }
+ }
+ }
+ }
+
+ m_BlockInfos.clear();
+
+ PostprocessMetadata();
+
+ return VK_SUCCESS;
}
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
- const size_t blockCount = m_pBlockVector->GetBlockCount();
- for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- VmaBlockMetadata_Generic* const pMetadata =
- (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
- pMetadata->m_FreeCount = 0;
- pMetadata->m_SumFreeSize = pMetadata->GetSize();
- pMetadata->m_FreeSuballocationsBySize.clear();
- for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
- it != pMetadata->m_Suballocations.end(); )
- {
- if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
- {
- VmaSuballocationList::iterator nextIt = it;
- ++nextIt;
- pMetadata->m_Suballocations.erase(it);
- it = nextIt;
- }
- else
- {
- ++it;
- }
- }
- }
+ const size_t blockCount = m_pBlockVector->GetBlockCount();
+ for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+ {
+ VmaBlockMetadata_Generic* const pMetadata =
+ (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
+ pMetadata->m_FreeCount = 0;
+ pMetadata->m_SumFreeSize = pMetadata->GetSize();
+ pMetadata->m_FreeSuballocationsBySize.clear();
+ for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
+ it != pMetadata->m_Suballocations.end(); )
+ {
+ if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
+ {
+ VmaSuballocationList::iterator nextIt = it;
+ ++nextIt;
+ pMetadata->m_Suballocations.erase(it);
+ it = nextIt;
+ }
+ else
+ {
+ ++it;
+ }
+ }
+ }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
- const size_t blockCount = m_pBlockVector->GetBlockCount();
- for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
- {
- VmaBlockMetadata_Generic* const pMetadata =
- (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
- const VkDeviceSize blockSize = pMetadata->GetSize();
-
- // No allocations in this block - entire area is free.
- if(pMetadata->m_Suballocations.empty())
- {
- pMetadata->m_FreeCount = 1;
- //pMetadata->m_SumFreeSize is already set to blockSize.
- VmaSuballocation suballoc = {
- 0, // offset
- blockSize, // size
- VMA_NULL, // hAllocation
- VMA_SUBALLOCATION_TYPE_FREE };
- pMetadata->m_Suballocations.push_back(suballoc);
- pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
- }
- // There are some allocations in this block.
- else
- {
- VkDeviceSize offset = 0;
- VmaSuballocationList::iterator it;
- for(it = pMetadata->m_Suballocations.begin();
- it != pMetadata->m_Suballocations.end();
- ++it)
- {
- VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
- VMA_ASSERT(it->offset >= offset);
-
- // Need to insert preceding free space.
- if(it->offset > offset)
- {
- ++pMetadata->m_FreeCount;
- const VkDeviceSize freeSize = it->offset - offset;
- VmaSuballocation suballoc = {
- offset, // offset
- freeSize, // size
- VMA_NULL, // hAllocation
- VMA_SUBALLOCATION_TYPE_FREE };
- VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
- if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
- }
- }
-
- pMetadata->m_SumFreeSize -= it->size;
- offset = it->offset + it->size;
- }
-
- // Need to insert trailing free space.
- if(offset < blockSize)
- {
- ++pMetadata->m_FreeCount;
- const VkDeviceSize freeSize = blockSize - offset;
- VmaSuballocation suballoc = {
- offset, // offset
- freeSize, // size
- VMA_NULL, // hAllocation
- VMA_SUBALLOCATION_TYPE_FREE };
- VMA_ASSERT(it == pMetadata->m_Suballocations.end());
- VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
- if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
- {
- pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
- }
- }
-
- VMA_SORT(
- pMetadata->m_FreeSuballocationsBySize.begin(),
- pMetadata->m_FreeSuballocationsBySize.end(),
- VmaSuballocationItemSizeLess());
- }
-
- VMA_HEAVY_ASSERT(pMetadata->Validate());
- }
+ const size_t blockCount = m_pBlockVector->GetBlockCount();
+ for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
+ {
+ VmaBlockMetadata_Generic* const pMetadata =
+ (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
+ const VkDeviceSize blockSize = pMetadata->GetSize();
+
+ // No allocations in this block - entire area is free.
+ if(pMetadata->m_Suballocations.empty())
+ {
+ pMetadata->m_FreeCount = 1;
+ //pMetadata->m_SumFreeSize is already set to blockSize.
+ VmaSuballocation suballoc = {
+ 0, // offset
+ blockSize, // size
+ VMA_NULL, // hAllocation
+ VMA_SUBALLOCATION_TYPE_FREE };
+ pMetadata->m_Suballocations.push_back(suballoc);
+ pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
+ }
+ // There are some allocations in this block.
+ else
+ {
+ VkDeviceSize offset = 0;
+ VmaSuballocationList::iterator it;
+ for(it = pMetadata->m_Suballocations.begin();
+ it != pMetadata->m_Suballocations.end();
+ ++it)
+ {
+ VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
+ VMA_ASSERT(it->offset >= offset);
+
+ // Need to insert preceding free space.
+ if(it->offset > offset)
+ {
+ ++pMetadata->m_FreeCount;
+ const VkDeviceSize freeSize = it->offset - offset;
+ VmaSuballocation suballoc = {
+ offset, // offset
+ freeSize, // size
+ VMA_NULL, // hAllocation
+ VMA_SUBALLOCATION_TYPE_FREE };
+ VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
+ if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ {
+ pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
+ }
+ }
+
+ pMetadata->m_SumFreeSize -= it->size;
+ offset = it->offset + it->size;
+ }
+
+ // Need to insert trailing free space.
+ if(offset < blockSize)
+ {
+ ++pMetadata->m_FreeCount;
+ const VkDeviceSize freeSize = blockSize - offset;
+ VmaSuballocation suballoc = {
+ offset, // offset
+ freeSize, // size
+ VMA_NULL, // hAllocation
+ VMA_SUBALLOCATION_TYPE_FREE };
+ VMA_ASSERT(it == pMetadata->m_Suballocations.end());
+ VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
+ if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
+ {
+ pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
+ }
+ }
+
+ VMA_SORT(
+ pMetadata->m_FreeSuballocationsBySize.begin(),
+ pMetadata->m_FreeSuballocationsBySize.end(),
+ VmaSuballocationItemSizeLess());
+ }
+
+ VMA_HEAVY_ASSERT(pMetadata->Validate());
+ }
}
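PostprocessMetadata above rebuilds each block's free-space bookkeeping by walking the used suballocations in offset order and synthesizing FREE entries for the gaps between and after them. The same gap-walking logic, as a self-contained sketch over plain (offset, size) pairs:

#include <cstdint>
#include <utility>
#include <vector>

// Sketch: free runs of a block, given allocations sorted by offset.
std::vector<std::pair<uint64_t, uint64_t>> FreeRuns(
	const std::vector<std::pair<uint64_t, uint64_t>>& allocs,
	uint64_t blockSize)
{
	std::vector<std::pair<uint64_t, uint64_t>> runs;
	uint64_t offset = 0;
	for(const auto& a : allocs)
	{
		if(a.first > offset)
			runs.emplace_back(offset, a.first - offset); // preceding free space
		offset = a.first + a.second;
	}
	if(offset < blockSize)
		runs.emplace_back(offset, blockSize - offset); // trailing free space
	return runs;
}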
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
- // TODO: Optimize somehow. Remember iterator instead of searching for it linearly.
- VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
-    while(it != pMetadata->m_Suballocations.end() &&
-        it->offset < suballoc.offset)
-    {
-        ++it;
-    }
- pMetadata->m_Suballocations.insert(it, suballoc);
+ // TODO: Optimize somehow. Remember iterator instead of searching for it linearly.
+ VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
+	while(it != pMetadata->m_Suballocations.end() &&
+		it->offset < suballoc.offset)
+	{
+		++it;
+	}
+ pMetadata->m_Suballocations.insert(it, suballoc);
}
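The TODO above notes that the insertion point is found by a linear scan. The same scan can be expressed with <algorithm> (illustrative only, not how the allocator is written):

#include <algorithm>
// Sketch: the same sorted insert expressed with std::find_if.
VmaSuballocationList::iterator it = std::find_if(
	pMetadata->m_Suballocations.begin(), pMetadata->m_Suballocations.end(),
	[&](const VmaSuballocation& s) { return s.offset >= suballoc.offset; });
pMetadata->m_Suballocations.insert(it, suballoc);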
////////////////////////////////////////////////////////////////////////////////
// VmaBlockVectorDefragmentationContext
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
- VmaAllocator hAllocator,
- VmaPool hCustomPool,
- VmaBlockVector* pBlockVector,
- uint32_t currFrameIndex) :
- res(VK_SUCCESS),
- mutexLocked(false),
- blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
- defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
- defragmentationMovesProcessed(0),
- defragmentationMovesCommitted(0),
- hasDefragmentationPlan(0),
- m_hAllocator(hAllocator),
- m_hCustomPool(hCustomPool),
- m_pBlockVector(pBlockVector),
- m_CurrFrameIndex(currFrameIndex),
- m_pAlgorithm(VMA_NULL),
- m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
- m_AllAllocations(false)
+ VmaAllocator hAllocator,
+ VmaPool hCustomPool,
+ VmaBlockVector* pBlockVector,
+ uint32_t currFrameIndex) :
+ res(VK_SUCCESS),
+ mutexLocked(false),
+ blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
+ m_hAllocator(hAllocator),
+ m_hCustomPool(hCustomPool),
+ m_pBlockVector(pBlockVector),
+ m_CurrFrameIndex(currFrameIndex),
+ m_pAlgorithm(VMA_NULL),
+ m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
+ m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
- vma_delete(m_hAllocator, m_pAlgorithm);
+ vma_delete(m_hAllocator, m_pAlgorithm);
}
void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
- AllocInfo info = { hAlloc, pChanged };
- m_Allocations.push_back(info);
-}
-
-void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
-{
- const bool allAllocations = m_AllAllocations ||
- m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
-
- /********************************
- HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
- ********************************/
-
- /*
- Fast algorithm is supported only when certain criteria are met:
- - VMA_DEBUG_MARGIN is 0.
- - All allocations in this block vector are moveable.
- - There is no possibility of image/buffer granularity conflict.
- - The defragmentation is not incremental
- */
- if(VMA_DEBUG_MARGIN == 0 &&
- allAllocations &&
- !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
- !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
- {
- m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
- m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
- }
- else
- {
- m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
- m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
- }
-
- if(allAllocations)
- {
- m_pAlgorithm->AddAll();
- }
- else
- {
- for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
- {
- m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
- }
- }
+ AllocInfo info = { hAlloc, pChanged };
+ m_Allocations.push_back(info);
+}
+
+void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
+{
+ const bool allAllocations = m_AllAllocations ||
+ m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
+
+ /********************************
+ HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
+ ********************************/
+
+ /*
+ Fast algorithm is supported only when certain criteria are met:
+ - VMA_DEBUG_MARGIN is 0.
+ - All allocations in this block vector are moveable.
+ - There is no possibility of image/buffer granularity conflict.
+ */
+ if(VMA_DEBUG_MARGIN == 0 &&
+ allAllocations &&
+ !m_pBlockVector->IsBufferImageGranularityConflictPossible())
+ {
+ m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
+ m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
+ }
+ else
+ {
+ m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
+ m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
+ }
+
+ if(allAllocations)
+ {
+ m_pAlgorithm->AddAll();
+ }
+ else
+ {
+ for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
+ {
+ m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
+ }
+ }
}
////////////////////////////////////////////////////////////////////////////////
// VmaDefragmentationContext
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
- VmaAllocator hAllocator,
- uint32_t currFrameIndex,
- uint32_t flags,
- VmaDefragmentationStats* pStats) :
- m_hAllocator(hAllocator),
- m_CurrFrameIndex(currFrameIndex),
- m_Flags(flags),
- m_pStats(pStats),
- m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
+ VmaAllocator hAllocator,
+ uint32_t currFrameIndex,
+ uint32_t flags,
+ VmaDefragmentationStats* pStats) :
+ m_hAllocator(hAllocator),
+ m_CurrFrameIndex(currFrameIndex),
+ m_Flags(flags),
+ m_pStats(pStats),
+ m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
- memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
+ memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
- for(size_t i = m_CustomPoolContexts.size(); i--; )
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
- pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
- vma_delete(m_hAllocator, pBlockVectorCtx);
- }
- for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
- if(pBlockVectorCtx)
- {
- pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
- vma_delete(m_hAllocator, pBlockVectorCtx);
- }
- }
+ for(size_t i = m_CustomPoolContexts.size(); i--; )
+ {
+ VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
+ pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
+ vma_delete(m_hAllocator, pBlockVectorCtx);
+ }
+ for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
+ {
+ VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
+ if(pBlockVectorCtx)
+ {
+ pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
+ vma_delete(m_hAllocator, pBlockVectorCtx);
+ }
+ }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
- for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
- {
- VmaPool pool = pPools[poolIndex];
- VMA_ASSERT(pool);
- // Pools with algorithm other than default are not defragmented.
- if(pool->m_BlockVector.GetAlgorithm() == 0)
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
-
- for(size_t i = m_CustomPoolContexts.size(); i--; )
- {
- if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
- {
- pBlockVectorDefragCtx = m_CustomPoolContexts[i];
- break;
- }
- }
-
- if(!pBlockVectorDefragCtx)
- {
- pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
- m_hAllocator,
- pool,
- &pool->m_BlockVector,
- m_CurrFrameIndex);
- m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
- }
-
- pBlockVectorDefragCtx->AddAll();
- }
- }
+ for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
+ {
+ VmaPool pool = pPools[poolIndex];
+ VMA_ASSERT(pool);
+		// Pools with an algorithm other than the default are not defragmented.
+ if(pool->m_BlockVector.GetAlgorithm() == 0)
+ {
+ VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
+
+ for(size_t i = m_CustomPoolContexts.size(); i--; )
+ {
+ if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
+ {
+ pBlockVectorDefragCtx = m_CustomPoolContexts[i];
+ break;
+ }
+ }
+
+ if(!pBlockVectorDefragCtx)
+ {
+ pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+ m_hAllocator,
+ pool,
+ &pool->m_BlockVector,
+ m_CurrFrameIndex);
+ m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
+ }
+
+ pBlockVectorDefragCtx->AddAll();
+ }
+ }
}
void VmaDefragmentationContext_T::AddAllocations(
- uint32_t allocationCount,
- VmaAllocation* pAllocations,
- VkBool32* pAllocationsChanged)
-{
- // Dispatch pAllocations among defragmentators. Create them when necessary.
- for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- const VmaAllocation hAlloc = pAllocations[allocIndex];
- VMA_ASSERT(hAlloc);
- // DedicatedAlloc cannot be defragmented.
- if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
- // Lost allocation cannot be defragmented.
- (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
-
- const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
- // This allocation belongs to custom pool.
- if(hAllocPool != VK_NULL_HANDLE)
- {
- // Pools with algorithm other than default are not defragmented.
- if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
- {
- for(size_t i = m_CustomPoolContexts.size(); i--; )
- {
- if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
- {
- pBlockVectorDefragCtx = m_CustomPoolContexts[i];
- break;
- }
- }
- if(!pBlockVectorDefragCtx)
- {
- pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
- m_hAllocator,
- hAllocPool,
- &hAllocPool->m_BlockVector,
- m_CurrFrameIndex);
- m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
- }
- }
- }
- // This allocation belongs to default pool.
- else
- {
- const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
- pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
- if(!pBlockVectorDefragCtx)
- {
- pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
- m_hAllocator,
- VMA_NULL, // hCustomPool
- m_hAllocator->m_pBlockVectors[memTypeIndex],
- m_CurrFrameIndex);
- m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
- }
- }
-
- if(pBlockVectorDefragCtx)
- {
- VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
- &pAllocationsChanged[allocIndex] : VMA_NULL;
- pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
- }
- }
- }
+ uint32_t allocationCount,
+ VmaAllocation* pAllocations,
+ VkBool32* pAllocationsChanged)
+{
+	// Dispatch pAllocations among per-pool defragmentation contexts. Create them when necessary.
+ for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+ {
+ const VmaAllocation hAlloc = pAllocations[allocIndex];
+ VMA_ASSERT(hAlloc);
+ // DedicatedAlloc cannot be defragmented.
+ if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
+ // Lost allocation cannot be defragmented.
+ (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
+ {
+ VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
+
+ const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
+ // This allocation belongs to custom pool.
+ if(hAllocPool != VK_NULL_HANDLE)
+ {
+				// Pools with an algorithm other than the default are not defragmented.
+ if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
+ {
+ for(size_t i = m_CustomPoolContexts.size(); i--; )
+ {
+ if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
+ {
+ pBlockVectorDefragCtx = m_CustomPoolContexts[i];
+ break;
+ }
+ }
+ if(!pBlockVectorDefragCtx)
+ {
+ pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+ m_hAllocator,
+ hAllocPool,
+ &hAllocPool->m_BlockVector,
+ m_CurrFrameIndex);
+ m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
+ }
+ }
+ }
+ // This allocation belongs to default pool.
+ else
+ {
+ const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
+ pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
+ if(!pBlockVectorDefragCtx)
+ {
+ pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
+ m_hAllocator,
+ VMA_NULL, // hCustomPool
+ m_hAllocator->m_pBlockVectors[memTypeIndex],
+ m_CurrFrameIndex);
+ m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
+ }
+ }
+
+ if(pBlockVectorDefragCtx)
+ {
+ VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
+ &pAllocationsChanged[allocIndex] : VMA_NULL;
+ pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
+ }
+ }
+ }
}
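AddAllocations stores each caller-provided VkBool32* so the allocator can flag which allocations actually moved. A hedged caller-side sketch of consuming that flag once defragmentation completes (buffers, bufCreateInfos, allocations, and allocationsChanged are assumed caller-side arrays; error handling omitted): moved allocations need their bound resources recreated and rebound.

// Sketch: recreate and rebind buffers whose allocations were moved.
for(uint32_t i = 0; i < allocationCount; ++i)
{
	if(allocationsChanged[i] == VK_TRUE)
	{
		vkDestroyBuffer(device, buffers[i], nullptr);
		vkCreateBuffer(device, &bufCreateInfos[i], nullptr, &buffers[i]);
		vmaBindBufferMemory(allocator, allocations[i], buffers[i]);
	}
}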
VkResult VmaDefragmentationContext_T::Defragment(
- VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
- VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
- VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
-{
- if(pStats)
- {
- memset(pStats, 0, sizeof(VmaDefragmentationStats));
- }
-
- if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
- {
-		// For incremental defragmentations, we just earmark how much we can move.
-		// The real work happens in the defragmentation steps.
- m_MaxCpuBytesToMove = maxCpuBytesToMove;
- m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
-
- m_MaxGpuBytesToMove = maxGpuBytesToMove;
- m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;
-
- if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
- m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
- return VK_SUCCESS;
-
- return VK_NOT_READY;
- }
-
- if(commandBuffer == VK_NULL_HANDLE)
- {
- maxGpuBytesToMove = 0;
- maxGpuAllocationsToMove = 0;
- }
-
- VkResult res = VK_SUCCESS;
-
- // Process default pools.
- for(uint32_t memTypeIndex = 0;
- memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
- ++memTypeIndex)
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
- if(pBlockVectorCtx)
- {
- VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
- pBlockVectorCtx->GetBlockVector()->Defragment(
- pBlockVectorCtx,
- pStats, flags,
- maxCpuBytesToMove, maxCpuAllocationsToMove,
- maxGpuBytesToMove, maxGpuAllocationsToMove,
- commandBuffer);
- if(pBlockVectorCtx->res != VK_SUCCESS)
- {
- res = pBlockVectorCtx->res;
- }
- }
- }
-
- // Process custom pools.
- for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
- customCtxIndex < customCtxCount && res >= VK_SUCCESS;
- ++customCtxIndex)
- {
- VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
- VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
- pBlockVectorCtx->GetBlockVector()->Defragment(
- pBlockVectorCtx,
- pStats, flags,
- maxCpuBytesToMove, maxCpuAllocationsToMove,
- maxGpuBytesToMove, maxGpuAllocationsToMove,
- commandBuffer);
- if(pBlockVectorCtx->res != VK_SUCCESS)
- {
- res = pBlockVectorCtx->res;
- }
- }
-
- return res;
-}
-
-VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
-{
- VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
- uint32_t movesLeft = pInfo->moveCount;
-
- // Process default pools.
- for(uint32_t memTypeIndex = 0;
- memTypeIndex < m_hAllocator->GetMemoryTypeCount();
- ++memTypeIndex)
- {
- VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
- if(pBlockVectorCtx)
- {
- VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
-
- if(!pBlockVectorCtx->hasDefragmentationPlan)
- {
- pBlockVectorCtx->GetBlockVector()->Defragment(
- pBlockVectorCtx,
- m_pStats, m_Flags,
- m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
- m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
- VK_NULL_HANDLE);
-
- if(pBlockVectorCtx->res < VK_SUCCESS)
- continue;
-
- pBlockVectorCtx->hasDefragmentationPlan = true;
- }
-
- const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
- pBlockVectorCtx,
- pCurrentMove, movesLeft);
-
- movesLeft -= processed;
- pCurrentMove += processed;
- }
- }
-
- // Process custom pools.
- for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
- customCtxIndex < customCtxCount;
- ++customCtxIndex)
- {
- VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
- VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
-
- if(!pBlockVectorCtx->hasDefragmentationPlan)
- {
- pBlockVectorCtx->GetBlockVector()->Defragment(
- pBlockVectorCtx,
- m_pStats, m_Flags,
- m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
- m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
- VK_NULL_HANDLE);
-
- if(pBlockVectorCtx->res < VK_SUCCESS)
- continue;
-
- pBlockVectorCtx->hasDefragmentationPlan = true;
- }
-
- const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
- pBlockVectorCtx,
- pCurrentMove, movesLeft);
-
- movesLeft -= processed;
- pCurrentMove += processed;
- }
-
- pInfo->moveCount = pInfo->moveCount - movesLeft;
-
- return VK_SUCCESS;
-}
-VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
-{
- VkResult res = VK_SUCCESS;
-
- // Process default pools.
- for(uint32_t memTypeIndex = 0;
- memTypeIndex < m_hAllocator->GetMemoryTypeCount();
- ++memTypeIndex)
- {
- VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
- if(pBlockVectorCtx)
- {
- VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
-
- if(!pBlockVectorCtx->hasDefragmentationPlan)
- {
- res = VK_NOT_READY;
- continue;
- }
-
- pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
- pBlockVectorCtx, m_pStats);
-
- if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
- res = VK_NOT_READY;
- }
- }
-
- // Process custom pools.
- for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
- customCtxIndex < customCtxCount;
- ++customCtxIndex)
- {
- VmaBlockVectorDefragmentationContext *pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
- VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
-
- if(!pBlockVectorCtx->hasDefragmentationPlan)
- {
- res = VK_NOT_READY;
- continue;
- }
-
- pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
- pBlockVectorCtx, m_pStats);
-
- if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
- res = VK_NOT_READY;
- }
-
- return res;
+ VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
+ VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
+ VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
+{
+ if(pStats)
+ {
+ memset(pStats, 0, sizeof(VmaDefragmentationStats));
+ }
+
+ if(commandBuffer == VK_NULL_HANDLE)
+ {
+ maxGpuBytesToMove = 0;
+ maxGpuAllocationsToMove = 0;
+ }
+
+ VkResult res = VK_SUCCESS;
+
+ // Process default pools.
+ for(uint32_t memTypeIndex = 0;
+ memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
+ ++memTypeIndex)
+ {
+ VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
+ if(pBlockVectorCtx)
+ {
+ VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
+ pBlockVectorCtx->GetBlockVector()->Defragment(
+ pBlockVectorCtx,
+ pStats,
+ maxCpuBytesToMove, maxCpuAllocationsToMove,
+ maxGpuBytesToMove, maxGpuAllocationsToMove,
+ commandBuffer);
+ if(pBlockVectorCtx->res != VK_SUCCESS)
+ {
+ res = pBlockVectorCtx->res;
+ }
+ }
+ }
+
+ // Process custom pools.
+ for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
+ customCtxIndex < customCtxCount && res >= VK_SUCCESS;
+ ++customCtxIndex)
+ {
+ VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
+ VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
+ pBlockVectorCtx->GetBlockVector()->Defragment(
+ pBlockVectorCtx,
+ pStats,
+ maxCpuBytesToMove, maxCpuAllocationsToMove,
+ maxGpuBytesToMove, maxGpuAllocationsToMove,
+ commandBuffer);
+ if(pBlockVectorCtx->res != VK_SUCCESS)
+ {
+ res = pBlockVectorCtx->res;
+ }
+ }
+
+ return res;
}
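Defragment splits its budgets into CPU and GPU halves and, as shown above, zeroes the GPU budget when no command buffer is supplied. A minimal sketch of driving this context through the public VMA 2.x entry points, CPU-only (the allocator handle and caller-side arrays are assumptions of the example):

// Sketch: CPU-only defragmentation via vmaDefragmentationBegin/End.
VmaDefragmentationInfo2 info = {};
info.allocationCount = allocationCount;
info.pAllocations = allocations;
info.pAllocationsChanged = allocationsChanged;
info.maxCpuBytesToMove = VK_WHOLE_SIZE;     // no CPU byte cap
info.maxCpuAllocationsToMove = UINT32_MAX;  // no CPU count cap
info.commandBuffer = VK_NULL_HANDLE;        // GPU budget becomes 0, as above

VmaDefragmentationStats stats = {};
VmaDefragmentationContext ctx = VK_NULL_HANDLE;
VkResult res = vmaDefragmentationBegin(allocator, &info, &stats, &ctx);
if(res >= VK_SUCCESS)
{
	vmaDefragmentationEnd(allocator, ctx);
}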
////////////////////////////////////////////////////////////////////////////////
@@ -14342,579 +13946,577 @@ VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
#if VMA_RECORDING_ENABLED
VmaRecorder::VmaRecorder() :
- m_UseMutex(true),
- m_Flags(0),
- m_File(VMA_NULL),
- m_Freq(INT64_MAX),
- m_StartCounter(INT64_MAX)
+ m_UseMutex(true),
+ m_Flags(0),
+ m_File(VMA_NULL),
+ m_Freq(INT64_MAX),
+ m_StartCounter(INT64_MAX)
{
}
VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
- m_UseMutex = useMutex;
- m_Flags = settings.flags;
+ m_UseMutex = useMutex;
+ m_Flags = settings.flags;
- QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
- QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
+ QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
+ QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
- // Open file for writing.
- errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
- if(err != 0)
- {
- return VK_ERROR_INITIALIZATION_FAILED;
- }
+ // Open file for writing.
+ errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
+ if(err != 0)
+ {
+ return VK_ERROR_INITIALIZATION_FAILED;
+ }
- // Write header.
- fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
- fprintf(m_File, "%s\n", "1,8");
+ // Write header.
+ fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
+ fprintf(m_File, "%s\n", "1,8");
- return VK_SUCCESS;
+ return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
- if(m_File != VMA_NULL)
- {
- fclose(m_File);
- }
+ if(m_File != VMA_NULL)
+ {
+ fclose(m_File);
+ }
}
void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
+ Flush();
}
void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
+ Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex, const VmaPoolCreateInfo& createInfo, VmaPool pool)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
- createInfo.memoryTypeIndex,
- createInfo.flags,
- createInfo.blockSize,
- (uint64_t)createInfo.minBlockCount,
- (uint64_t)createInfo.maxBlockCount,
- createInfo.frameInUseCount,
- pool);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
+ createInfo.memoryTypeIndex,
+ createInfo.flags,
+ createInfo.blockSize,
+ (uint64_t)createInfo.minBlockCount,
+ (uint64_t)createInfo.maxBlockCount,
+ createInfo.frameInUseCount,
+ pool);
+ Flush();
}
void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
- pool);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
+ pool);
+ Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- vkMemReq.size,
- vkMemReq.alignment,
- vkMemReq.memoryTypeBits,
- createInfo.flags,
- createInfo.usage,
- createInfo.requiredFlags,
- createInfo.preferredFlags,
- createInfo.memoryTypeBits,
- createInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
+ const VkMemoryRequirements& vkMemReq,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaAllocation allocation)
+{
+ CallParams callParams;
+ GetBasicParams(callParams);
+
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+ fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+ vkMemReq.size,
+ vkMemReq.alignment,
+ vkMemReq.memoryTypeBits,
+ createInfo.flags,
+ createInfo.usage,
+ createInfo.requiredFlags,
+ createInfo.preferredFlags,
+ createInfo.memoryTypeBits,
+ createInfo.pool,
+ allocation,
+ userDataStr.GetString());
+ Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- const VmaAllocationCreateInfo& createInfo,
- uint64_t allocationCount,
- const VmaAllocation* pAllocations)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
- vkMemReq.size,
- vkMemReq.alignment,
- vkMemReq.memoryTypeBits,
- createInfo.flags,
- createInfo.usage,
- createInfo.requiredFlags,
- createInfo.preferredFlags,
- createInfo.memoryTypeBits,
- createInfo.pool);
- PrintPointerList(allocationCount, pAllocations);
- fprintf(m_File, ",%s\n", userDataStr.GetString());
- Flush();
+ const VkMemoryRequirements& vkMemReq,
+ const VmaAllocationCreateInfo& createInfo,
+ uint64_t allocationCount,
+ const VmaAllocation* pAllocations)
+{
+ CallParams callParams;
+ GetBasicParams(callParams);
+
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+ fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
+ vkMemReq.size,
+ vkMemReq.alignment,
+ vkMemReq.memoryTypeBits,
+ createInfo.flags,
+ createInfo.usage,
+ createInfo.requiredFlags,
+ createInfo.preferredFlags,
+ createInfo.memoryTypeBits,
+ createInfo.pool);
+ PrintPointerList(allocationCount, pAllocations);
+ fprintf(m_File, ",%s\n", userDataStr.GetString());
+ Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- vkMemReq.size,
- vkMemReq.alignment,
- vkMemReq.memoryTypeBits,
- requiresDedicatedAllocation ? 1 : 0,
- prefersDedicatedAllocation ? 1 : 0,
- createInfo.flags,
- createInfo.usage,
- createInfo.requiredFlags,
- createInfo.preferredFlags,
- createInfo.memoryTypeBits,
- createInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
+ const VkMemoryRequirements& vkMemReq,
+ bool requiresDedicatedAllocation,
+ bool prefersDedicatedAllocation,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaAllocation allocation)
+{
+ CallParams callParams;
+ GetBasicParams(callParams);
+
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+ fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+ vkMemReq.size,
+ vkMemReq.alignment,
+ vkMemReq.memoryTypeBits,
+ requiresDedicatedAllocation ? 1 : 0,
+ prefersDedicatedAllocation ? 1 : 0,
+ createInfo.flags,
+ createInfo.usage,
+ createInfo.requiredFlags,
+ createInfo.preferredFlags,
+ createInfo.memoryTypeBits,
+ createInfo.pool,
+ allocation,
+ userDataStr.GetString());
+ Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- const VmaAllocationCreateInfo& createInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- vkMemReq.size,
- vkMemReq.alignment,
- vkMemReq.memoryTypeBits,
- requiresDedicatedAllocation ? 1 : 0,
- prefersDedicatedAllocation ? 1 : 0,
- createInfo.flags,
- createInfo.usage,
- createInfo.requiredFlags,
- createInfo.preferredFlags,
- createInfo.memoryTypeBits,
- createInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
+ const VkMemoryRequirements& vkMemReq,
+ bool requiresDedicatedAllocation,
+ bool prefersDedicatedAllocation,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaAllocation allocation)
+{
+ CallParams callParams;
+ GetBasicParams(callParams);
+
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
+ fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+ vkMemReq.size,
+ vkMemReq.alignment,
+ vkMemReq.memoryTypeBits,
+ requiresDedicatedAllocation ? 1 : 0,
+ prefersDedicatedAllocation ? 1 : 0,
+ createInfo.flags,
+ createInfo.usage,
+ createInfo.requiredFlags,
+ createInfo.preferredFlags,
+ createInfo.memoryTypeBits,
+ createInfo.pool,
+ allocation,
+ userDataStr.GetString());
+ Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
- VmaAllocation allocation)
+ VmaAllocation allocation)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+ allocation);
+ Flush();
}
void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
- uint64_t allocationCount,
- const VmaAllocation* pAllocations)
+ uint64_t allocationCount,
+ const VmaAllocation* pAllocations)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
- PrintPointerList(allocationCount, pAllocations);
- fprintf(m_File, "\n");
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
+ PrintPointerList(allocationCount, pAllocations);
+ fprintf(m_File, "\n");
+ Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
- VmaAllocation allocation,
- const void* pUserData)
+ VmaAllocation allocation,
+ const void* pUserData)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(
- allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
- pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- allocation,
- userDataStr.GetString());
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ UserDataString userDataStr(
+ allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
+ pUserData);
+ fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+ allocation,
+ userDataStr.GetString());
+ Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
- VmaAllocation allocation)
+ VmaAllocation allocation)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
+ allocation);
+ Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
- VmaAllocation allocation)
+ VmaAllocation allocation)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+ allocation);
+ Flush();
}
void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
- VmaAllocation allocation)
+ VmaAllocation allocation)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
+ allocation);
+ Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
- VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
+ VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
- allocation,
- offset,
- size);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
+ allocation,
+ offset,
+ size);
+ Flush();
}
void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
- VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
+ VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
- allocation,
- offset,
- size);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
+ allocation,
+ offset,
+ size);
+ Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
- const VkBufferCreateInfo& bufCreateInfo,
- const VmaAllocationCreateInfo& allocCreateInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- bufCreateInfo.flags,
- bufCreateInfo.size,
- bufCreateInfo.usage,
- bufCreateInfo.sharingMode,
- allocCreateInfo.flags,
- allocCreateInfo.usage,
- allocCreateInfo.requiredFlags,
- allocCreateInfo.preferredFlags,
- allocCreateInfo.memoryTypeBits,
- allocCreateInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
+ const VkBufferCreateInfo& bufCreateInfo,
+ const VmaAllocationCreateInfo& allocCreateInfo,
+ VmaAllocation allocation)
+{
+ CallParams callParams;
+ GetBasicParams(callParams);
+
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
+ fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+ bufCreateInfo.flags,
+ bufCreateInfo.size,
+ bufCreateInfo.usage,
+ bufCreateInfo.sharingMode,
+ allocCreateInfo.flags,
+ allocCreateInfo.usage,
+ allocCreateInfo.requiredFlags,
+ allocCreateInfo.preferredFlags,
+ allocCreateInfo.memoryTypeBits,
+ allocCreateInfo.pool,
+ allocation,
+ userDataStr.GetString());
+ Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
- const VkImageCreateInfo& imageCreateInfo,
- const VmaAllocationCreateInfo& allocCreateInfo,
- VmaAllocation allocation)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
- fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- imageCreateInfo.flags,
- imageCreateInfo.imageType,
- imageCreateInfo.format,
- imageCreateInfo.extent.width,
- imageCreateInfo.extent.height,
- imageCreateInfo.extent.depth,
- imageCreateInfo.mipLevels,
- imageCreateInfo.arrayLayers,
- imageCreateInfo.samples,
- imageCreateInfo.tiling,
- imageCreateInfo.usage,
- imageCreateInfo.sharingMode,
- imageCreateInfo.initialLayout,
- allocCreateInfo.flags,
- allocCreateInfo.usage,
- allocCreateInfo.requiredFlags,
- allocCreateInfo.preferredFlags,
- allocCreateInfo.memoryTypeBits,
- allocCreateInfo.pool,
- allocation,
- userDataStr.GetString());
- Flush();
+ const VkImageCreateInfo& imageCreateInfo,
+ const VmaAllocationCreateInfo& allocCreateInfo,
+ VmaAllocation allocation)
+{
+ CallParams callParams;
+ GetBasicParams(callParams);
+
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
+ fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+ imageCreateInfo.flags,
+ imageCreateInfo.imageType,
+ imageCreateInfo.format,
+ imageCreateInfo.extent.width,
+ imageCreateInfo.extent.height,
+ imageCreateInfo.extent.depth,
+ imageCreateInfo.mipLevels,
+ imageCreateInfo.arrayLayers,
+ imageCreateInfo.samples,
+ imageCreateInfo.tiling,
+ imageCreateInfo.usage,
+ imageCreateInfo.sharingMode,
+ imageCreateInfo.initialLayout,
+ allocCreateInfo.flags,
+ allocCreateInfo.usage,
+ allocCreateInfo.requiredFlags,
+ allocCreateInfo.preferredFlags,
+ allocCreateInfo.memoryTypeBits,
+ allocCreateInfo.pool,
+ allocation,
+ userDataStr.GetString());
+ Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
- VmaAllocation allocation)
+ VmaAllocation allocation)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
+ allocation);
+ Flush();
}
void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
- VmaAllocation allocation)
+ VmaAllocation allocation)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
+ allocation);
+ Flush();
}
void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
- VmaAllocation allocation)
+ VmaAllocation allocation)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
+ allocation);
+ Flush();
}
void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
- VmaAllocation allocation)
+ VmaAllocation allocation)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
- allocation);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
+ allocation);
+ Flush();
}
void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
- VmaPool pool)
+ VmaPool pool)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
- pool);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
+ pool);
+ Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
- const VmaDefragmentationInfo2& info,
- VmaDefragmentationContext ctx)
-{
- CallParams callParams;
- GetBasicParams(callParams);
-
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
- info.flags);
- PrintPointerList(info.allocationCount, info.pAllocations);
- fprintf(m_File, ",");
- PrintPointerList(info.poolCount, info.pPools);
- fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
- info.maxCpuBytesToMove,
- info.maxCpuAllocationsToMove,
- info.maxGpuBytesToMove,
- info.maxGpuAllocationsToMove,
- info.commandBuffer,
- ctx);
- Flush();
+ const VmaDefragmentationInfo2& info,
+ VmaDefragmentationContext ctx)
+{
+ CallParams callParams;
+ GetBasicParams(callParams);
+
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
+ info.flags);
+ PrintPointerList(info.allocationCount, info.pAllocations);
+ fprintf(m_File, ",");
+ PrintPointerList(info.poolCount, info.pPools);
+ fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
+ info.maxCpuBytesToMove,
+ info.maxCpuAllocationsToMove,
+ info.maxGpuBytesToMove,
+ info.maxGpuAllocationsToMove,
+ info.commandBuffer,
+ ctx);
+ Flush();
}
void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
- VmaDefragmentationContext ctx)
+ VmaDefragmentationContext ctx)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
- ctx);
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
+ ctx);
+ Flush();
}
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
- VmaPool pool,
- const char* name)
+ VmaPool pool,
+ const char* name)
{
- CallParams callParams;
- GetBasicParams(callParams);
+ CallParams callParams;
+ GetBasicParams(callParams);
- VmaMutexLock lock(m_FileMutex, m_UseMutex);
- fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
- pool, name != VMA_NULL ? name : "");
- Flush();
+ VmaMutexLock lock(m_FileMutex, m_UseMutex);
+ fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
+ pool, name != VMA_NULL ? name : "");
+ Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
- if(pUserData != VMA_NULL)
- {
- if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
- {
- m_Str = (const char*)pUserData;
- }
- else
- {
- sprintf_s(m_PtrStr, "%p", pUserData);
- m_Str = m_PtrStr;
- }
- }
- else
- {
- m_Str = "";
- }
+ if(pUserData != VMA_NULL)
+ {
+ if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
+ {
+ m_Str = (const char*)pUserData;
+ }
+ else
+ {
+ sprintf_s(m_PtrStr, "%p", pUserData);
+ m_Str = m_PtrStr;
+ }
+ }
+ else
+ {
+ m_Str = "";
+ }
}
void VmaRecorder::WriteConfiguration(
- const VkPhysicalDeviceProperties& devProps,
- const VkPhysicalDeviceMemoryProperties& memProps,
- uint32_t vulkanApiVersion,
- bool dedicatedAllocationExtensionEnabled,
- bool bindMemory2ExtensionEnabled,
- bool memoryBudgetExtensionEnabled,
- bool deviceCoherentMemoryExtensionEnabled)
-{
- fprintf(m_File, "Config,Begin\n");
-
- fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
-
- fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
- fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
- fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
- fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
- fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
- fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);
-
- fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
- fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
- fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
-
- fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
- for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
- {
- fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
- fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
- }
- fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
- for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
- {
- fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
- fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
- }
-
- fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
- fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
- fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
- fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);
-
- fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
- fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
- fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
- fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
- fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
- fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
- fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
- fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
- fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
-
- fprintf(m_File, "Config,End\n");
+ const VkPhysicalDeviceProperties& devProps,
+ const VkPhysicalDeviceMemoryProperties& memProps,
+ uint32_t vulkanApiVersion,
+ bool dedicatedAllocationExtensionEnabled,
+ bool bindMemory2ExtensionEnabled,
+ bool memoryBudgetExtensionEnabled)
+{
+ fprintf(m_File, "Config,Begin\n");
+
+ fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
+
+ fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
+ fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
+ fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
+ fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
+ fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
+ fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);
+
+ fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
+ fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
+ fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
+
+ fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
+ for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
+ {
+ fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
+ fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
+ }
+ fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
+ for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
+ {
+ fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
+ fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
+ }
+
+ fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
+ fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
+ fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
+
+ fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
+ fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
+ fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
+ fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
+ fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
+ fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
+ fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
+ fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
+ fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
+
+ fprintf(m_File, "Config,End\n");
}
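A side note on the VulkanApiVersion line written above: VK_VERSION_MAJOR/VK_VERSION_MINOR simply unpack the bit-packed uint32_t produced by VK_MAKE_VERSION. A minimal standalone sketch of that encoding (the shift constants follow Vulkan's documented version layout; nothing here is defined by this file):

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        // VK_MAKE_VERSION(major, minor, patch) == (major << 22) | (minor << 12) | patch
        const uint32_t api = (1u << 22) | (1u << 12) | 0u; // VK_MAKE_VERSION(1, 1, 0)
        std::printf("VulkanApiVersion,%u,%u\n", api >> 22, (api >> 12) & 0x3FFu);
        return 0; // prints: VulkanApiVersion,1,1
    }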
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
- outParams.threadId = GetCurrentThreadId();
+ outParams.threadId = GetCurrentThreadId();
- LARGE_INTEGER counter;
- QueryPerformanceCounter(&counter);
- outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
+ LARGE_INTEGER counter;
+ QueryPerformanceCounter(&counter);
+ outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
- if(count)
- {
- fprintf(m_File, "%p", pItems[0]);
- for(uint64_t i = 1; i < count; ++i)
- {
- fprintf(m_File, " %p", pItems[i]);
- }
- }
+ if(count)
+ {
+ fprintf(m_File, "%p", pItems[0]);
+ for(uint64_t i = 1; i < count; ++i)
+ {
+ fprintf(m_File, " %p", pItems[i]);
+ }
+ }
}
void VmaRecorder::Flush()
{
- if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
- {
- fflush(m_File);
- }
+ if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
+ {
+ fflush(m_File);
+ }
}
#endif // #if VMA_RECORDING_ENABLED
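For context, this is how an application opts into the recording path guarded by VMA_RECORDING_ENABLED. A minimal sketch using the API visible in this diff (the instance/device handles are assumed to come from the application's own Vulkan setup):

    #include "vk_mem_alloc.h"

    VkResult CreateRecordingAllocator(VkInstance instance, VkPhysicalDevice physicalDevice,
                                      VkDevice device, VmaAllocator* outAllocator)
    {
        VmaRecordSettings recordSettings = {};
        recordSettings.pFilePath = "vma_recording.csv";          // file VmaRecorder writes to
        recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT;  // triggers fflush() in Flush() above

        VmaAllocatorCreateInfo allocatorInfo = {};
        allocatorInfo.instance = instance;
        allocatorInfo.physicalDevice = physicalDevice;
        allocatorInfo.device = device;
        allocatorInfo.pRecordSettings = &recordSettings; // VK_ERROR_FEATURE_NOT_PRESENT unless VMA_RECORDING_ENABLED == 1

        return vmaCreateAllocator(&allocatorInfo, outAllocator);
    }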
@@ -14923,1878 +14525,1836 @@ void VmaRecorder::Flush()
// VmaAllocationObjectAllocator
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
- m_Allocator(pAllocationCallbacks, 1024)
+ m_Allocator(pAllocationCallbacks, 1024)
{
}
-template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
+VmaAllocation VmaAllocationObjectAllocator::Allocate()
{
- VmaMutexLock mutexLock(m_Mutex);
- return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
+ VmaMutexLock mutexLock(m_Mutex);
+ return m_Allocator.Alloc();
}
void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
- VmaMutexLock mutexLock(m_Mutex);
- m_Allocator.Free(hAlloc);
+ VmaMutexLock mutexLock(m_Mutex);
+ m_Allocator.Free(hAlloc);
}
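The change above replaces the variadic, construct-in-place Allocate(Types...) with a plain Allocate() that hands out raw objects, leaving construction to a separate Ctor() call (see AllocateDedicatedMemoryPage further down, and the matching Dtor() before Free). A hedged generic sketch of that two-phase pattern — TwoPhasePool is illustrative only, not VMA's actual VmaPoolAllocator:

    #include <mutex>
    #include <new>

    template<typename T>
    class TwoPhasePool {
    public:
        T* Allocate() {
            std::lock_guard<std::mutex> lock(m_Mutex);          // plays the role of VmaMutexLock above
            return static_cast<T*>(::operator new(sizeof(T)));  // raw storage; constructor NOT run
        }
        void Free(T* p) {
            std::lock_guard<std::mutex> lock(m_Mutex);
            ::operator delete(p); // caller must have run the destructor (the "Dtor" step) first
        }
    private:
        std::mutex m_Mutex;
    };

    // Usage, mirroring Allocate() + Ctor() in this file:
    //   Widget* w = pool.Allocate();
    //   new (w) Widget(/*frameIndex*/ 0, /*isUserDataString*/ false); // explicit "Ctor" step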
////////////////////////////////////////////////////////////////////////////////
// VmaAllocator_T
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
- m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
- m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
- m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
- m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
- m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
- m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
- m_hDevice(pCreateInfo->device),
- m_hInstance(pCreateInfo->instance),
- m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
- m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
- *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
- m_AllocationObjectAllocator(&m_AllocationCallbacks),
- m_HeapSizeLimitMask(0),
- m_PreferredLargeHeapBlockSize(0),
- m_PhysicalDevice(pCreateInfo->physicalDevice),
- m_CurrentFrameIndex(0),
- m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
- m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
- m_NextPoolId(0),
- m_GlobalMemoryTypeBits(UINT32_MAX)
+ m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
+ m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
+ m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
+ m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
+ m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
+ m_hDevice(pCreateInfo->device),
+ m_hInstance(pCreateInfo->instance),
+ m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
+ m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
+ *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
+ m_AllocationObjectAllocator(&m_AllocationCallbacks),
+ m_HeapSizeLimitMask(0),
+ m_PreferredLargeHeapBlockSize(0),
+ m_PhysicalDevice(pCreateInfo->physicalDevice),
+ m_CurrentFrameIndex(0),
+ m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
+ m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
+ m_NextPoolId(0)
#if VMA_RECORDING_ENABLED
- ,m_pRecorder(VMA_NULL)
+ ,m_pRecorder(VMA_NULL)
#endif
{
- if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
- {
- m_UseKhrDedicatedAllocation = false;
- m_UseKhrBindMemory2 = false;
- }
+ if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+ {
+ m_UseKhrDedicatedAllocation = false;
+ m_UseKhrBindMemory2 = false;
+ }
- if(VMA_DEBUG_DETECT_CORRUPTION)
- {
- // Needs to be multiply of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.
- VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
- }
+ if(VMA_DEBUG_DETECT_CORRUPTION)
+ {
+ // Needs to be a multiple of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.

+ VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
+ }
- VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
+ VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);
- if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
- {
+ if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
+ {
#if !(VMA_DEDICATED_ALLOCATION)
- if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
- {
- VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
- }
+ if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
+ {
+ VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
+ }
#endif
#if !(VMA_BIND_MEMORY2)
- if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
- {
- VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
- }
+ if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
+ {
+ VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
+ }
#endif
- }
+ }
#if !(VMA_MEMORY_BUDGET)
- if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
- {
- VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
- }
+ if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
+ {
+ VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
+ }
#endif
#if VMA_VULKAN_VERSION < 1001000
- if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
- {
- VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
- }
+ if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+ {
+ VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
+ }
#endif
- memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks));
- memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
- memset(&m_MemProps, 0, sizeof(m_MemProps));
-
- memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
- memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
- memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));
-
- if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
- {
- m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
- m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
- }
-
- ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
-
- (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
- (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
-
- VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
- VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
- VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
- VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
-
- m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
- pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
-
- m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
-
- if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
- {
- for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
- {
- const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
- if(limit != VK_WHOLE_SIZE)
- {
- m_HeapSizeLimitMask |= 1u << heapIndex;
- if(limit < m_MemProps.memoryHeaps[heapIndex].size)
- {
- m_MemProps.memoryHeaps[heapIndex].size = limit;
- }
- }
- }
- }
-
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
-
- m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
- this,
- VK_NULL_HANDLE, // hParentPool
- memTypeIndex,
- preferredBlockSize,
- 0,
- SIZE_MAX,
- GetBufferImageGranularity(),
- pCreateInfo->frameInUseCount,
- false, // explicitBlockSize
- false); // linearAlgorithm
- // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here,
- // becase minBlockCount is 0.
- m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
-
- }
+ memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
+ memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
+ memset(&m_MemProps, 0, sizeof(m_MemProps));
+
+ memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
+ memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
+ memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));
+
+ if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
+ {
+ m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
+ m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
+ }
+
+ ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);
+
+ (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
+ (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
+
+ VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
+ VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
+ VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
+ VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
+
+ m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
+ pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
+
+ if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
+ {
+ for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
+ {
+ const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
+ if(limit != VK_WHOLE_SIZE)
+ {
+ m_HeapSizeLimitMask |= 1u << heapIndex;
+ if(limit < m_MemProps.memoryHeaps[heapIndex].size)
+ {
+ m_MemProps.memoryHeaps[heapIndex].size = limit;
+ }
+ }
+ }
+ }
+
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+ {
+ const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
+
+ m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
+ this,
+ VK_NULL_HANDLE, // hParentPool
+ memTypeIndex,
+ preferredBlockSize,
+ 0,
+ SIZE_MAX,
+ GetBufferImageGranularity(),
+ pCreateInfo->frameInUseCount,
+ false, // explicitBlockSize
+ false); // linearAlgorithm
+ // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here,
+ // because minBlockCount is 0.
+ m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
+
+ }
}
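The pHeapSizeLimit handling in the constructor above clamps the reported heap sizes: VK_WHOLE_SIZE means "no limit" for that heap, any other value caps it. A hedged usage sketch (the 256 MiB cap is illustrative):

    #include <vector>
    #include "vk_mem_alloc.h"

    VkResult CreateCappedAllocator(VkInstance instance, VkPhysicalDevice physicalDevice,
                                   VkDevice device, uint32_t heapCount, VmaAllocator* outAllocator)
    {
        // heapCount must equal VkPhysicalDeviceMemoryProperties::memoryHeapCount —
        // the constructor above indexes pHeapSizeLimit for every heap.
        std::vector<VkDeviceSize> limits(heapCount, VK_WHOLE_SIZE);
        limits[0] = 256ull * 1024 * 1024; // cap heap 0 at 256 MiB (illustrative value)

        VmaAllocatorCreateInfo info = {};
        info.instance = instance;
        info.physicalDevice = physicalDevice;
        info.device = device;
        info.pHeapSizeLimit = limits.data(); // one entry per memory heap
        return vmaCreateAllocator(&info, outAllocator);
    }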
VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
- VkResult res = VK_SUCCESS;
+ VkResult res = VK_SUCCESS;
- if(pCreateInfo->pRecordSettings != VMA_NULL &&
- !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
- {
+ if(pCreateInfo->pRecordSettings != VMA_NULL &&
+ !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
+ {
#if VMA_RECORDING_ENABLED
- m_pRecorder = vma_new(this, VmaRecorder)();
- res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
- if(res != VK_SUCCESS)
- {
- return res;
- }
- m_pRecorder->WriteConfiguration(
- m_PhysicalDeviceProperties,
- m_MemProps,
- m_VulkanApiVersion,
- m_UseKhrDedicatedAllocation,
- m_UseKhrBindMemory2,
- m_UseExtMemoryBudget,
- m_UseAmdDeviceCoherentMemory);
- m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
+ m_pRecorder = vma_new(this, VmaRecorder)();
+ res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
+ if(res != VK_SUCCESS)
+ {
+ return res;
+ }
+ m_pRecorder->WriteConfiguration(
+ m_PhysicalDeviceProperties,
+ m_MemProps,
+ m_VulkanApiVersion,
+ m_UseKhrDedicatedAllocation,
+ m_UseKhrBindMemory2,
+ m_UseExtMemoryBudget);
+ m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
- VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
- return VK_ERROR_FEATURE_NOT_PRESENT;
+ VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
+ return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
- }
+ }
#if VMA_MEMORY_BUDGET
- if(m_UseExtMemoryBudget)
- {
- UpdateVulkanBudget();
- }
+ if(m_UseExtMemoryBudget)
+ {
+ UpdateVulkanBudget();
+ }
#endif // #if VMA_MEMORY_BUDGET
- return res;
+ return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
- if(m_pRecorder != VMA_NULL)
- {
- m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
- vma_delete(this, m_pRecorder);
- }
+ if(m_pRecorder != VMA_NULL)
+ {
+ m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
+ vma_delete(this, m_pRecorder);
+ }
#endif
-
- VMA_ASSERT(m_Pools.empty());
+
+ VMA_ASSERT(m_Pools.empty());
- for(size_t i = GetMemoryTypeCount(); i--; )
- {
- if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
- {
- VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
- }
+ for(size_t i = GetMemoryTypeCount(); i--; )
+ {
+ if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
+ {
+ VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
+ }
- vma_delete(this, m_pDedicatedAllocations[i]);
- vma_delete(this, m_pBlockVectors[i]);
- }
+ vma_delete(this, m_pDedicatedAllocations[i]);
+ vma_delete(this, m_pBlockVectors[i]);
+ }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
- m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
- m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
- m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
- m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
- m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
- m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
- m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
- m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
- m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
- m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
- m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
- m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
- m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
- m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
- m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
- m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
- m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
+ m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
+ m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
+ m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
+ m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
+ m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
+ m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
+ m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
+ m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
+ m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
+ m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
+ m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
+ m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
+ m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
+ m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
+ m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
+ m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
+ m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
- if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
- {
- VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
- m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
- (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
- m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
- (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
- m_VulkanFunctions.vkBindBufferMemory2KHR =
- (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
- m_VulkanFunctions.vkBindImageMemory2KHR =
- (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
- m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
- (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
- }
+ if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+ {
+ VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
+ m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
+ (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
+ m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
+ (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
+ m_VulkanFunctions.vkBindBufferMemory2KHR =
+ (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
+ m_VulkanFunctions.vkBindImageMemory2KHR =
+ (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
+ m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
+ (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
+ }
#endif
#if VMA_DEDICATED_ALLOCATION
- if(m_UseKhrDedicatedAllocation)
- {
- m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
- (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
- m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
- (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
- }
+ if(m_UseKhrDedicatedAllocation)
+ {
+ m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
+ (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
+ m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
+ (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
+ }
#endif
#if VMA_BIND_MEMORY2
- if(m_UseKhrBindMemory2)
- {
- m_VulkanFunctions.vkBindBufferMemory2KHR =
- (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
- m_VulkanFunctions.vkBindImageMemory2KHR =
- (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
- }
+ if(m_UseKhrBindMemory2)
+ {
+ m_VulkanFunctions.vkBindBufferMemory2KHR =
+ (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
+ m_VulkanFunctions.vkBindImageMemory2KHR =
+ (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
+ }
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
- if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
- {
- VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
- m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
- (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
- }
+ if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
+ {
+ VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
+ m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
+ (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
+ }
#endif // #if VMA_MEMORY_BUDGET
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
#define VMA_COPY_IF_NOT_NULL(funcName) \
- if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
-
- if(pVulkanFunctions != VMA_NULL)
- {
- VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
- VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
- VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
- VMA_COPY_IF_NOT_NULL(vkFreeMemory);
- VMA_COPY_IF_NOT_NULL(vkMapMemory);
- VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
- VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
- VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
- VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
- VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
- VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
- VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
- VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
- VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
- VMA_COPY_IF_NOT_NULL(vkCreateImage);
- VMA_COPY_IF_NOT_NULL(vkDestroyImage);
- VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
+ if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
+
+ if(pVulkanFunctions != VMA_NULL)
+ {
+ VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
+ VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
+ VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
+ VMA_COPY_IF_NOT_NULL(vkFreeMemory);
+ VMA_COPY_IF_NOT_NULL(vkMapMemory);
+ VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
+ VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
+ VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
+ VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
+ VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
+ VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
+ VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
+ VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
+ VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
+ VMA_COPY_IF_NOT_NULL(vkCreateImage);
+ VMA_COPY_IF_NOT_NULL(vkDestroyImage);
+ VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
- VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
+ VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
+ VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
- VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
- VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
+ VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
+ VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif
#if VMA_MEMORY_BUDGET
- VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
+ VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif
- }
+ }
#undef VMA_COPY_IF_NOT_NULL
- // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
- // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
- VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
+ // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
+ // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
+ VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
- {
- VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
- }
+ if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
+ {
+ VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
+ }
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
- if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
- {
- VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
- VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
- }
+ if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
+ {
+ VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
+ VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
+ }
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
- if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
- {
- VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
- }
+ if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+ {
+ VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
+ }
#endif
}
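When VMA_STATIC_VULKAN_FUNCTIONS is not 1, the entry points come from VmaAllocatorCreateInfo::pVulkanFunctions instead. A minimal sketch of filling that struct — here from the statically linked loader symbols for brevity; a dynamic-loading setup would take them from vkGetInstanceProcAddr/vkGetDeviceProcAddr instead:

    #include <vulkan/vulkan.h>
    #include "vk_mem_alloc.h"

    void FillVulkanFunctions(VmaVulkanFunctions* out)
    {
        out->vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
        out->vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
        out->vkAllocateMemory = vkAllocateMemory;
        out->vkFreeMemory = vkFreeMemory;
        out->vkMapMemory = vkMapMemory;
        out->vkUnmapMemory = vkUnmapMemory;
        // ...and the rest: vkFlushMappedMemoryRanges, vkBindBufferMemory, vkCreateBuffer,
        // vkCmdCopyBuffer, etc. Every pointer checked by the VMA_ASSERTs above must be
        // filled, or ImportVulkanFunctions will assert.
    }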
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
- const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
- const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
- const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
- return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
+ const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+ const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
+ const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
+ return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
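A worked version of the computation above, assuming the library's usual defaults (VMA_SMALL_HEAP_MAX_SIZE = 1 GiB, VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB — both are configurable macros, so these numbers are assumptions):

    #include <cstdint>
    using VkDeviceSize = uint64_t;

    static VkDeviceSize AlignUp(VkDeviceSize v, VkDeviceSize a) { return (v + a - 1) / a * a; }

    // Mirrors CalcPreferredBlockSize: small heaps get heapSize / 8, large heaps get
    // the fixed preferred block size, both rounded up to a 32-byte multiple.
    VkDeviceSize PreferredBlockSize(VkDeviceSize heapSize)
    {
        const VkDeviceSize smallHeapMax = 1024ull << 20; // 1 GiB (assumed default)
        const VkDeviceSize largeBlock   = 256ull  << 20; // 256 MiB (assumed default)
        const bool isSmallHeap = heapSize <= smallHeapMax;
        return AlignUp(isSmallHeap ? heapSize / 8 : largeBlock, 32);
    }
    // e.g. a 256 MiB heap -> 32 MiB blocks; an 8 GiB heap -> 256 MiB blocks.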
VkResult VmaAllocator_T::AllocateMemoryOfType(
- VkDeviceSize size,
- VkDeviceSize alignment,
- bool dedicatedAllocation,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- const VmaAllocationCreateInfo& createInfo,
- uint32_t memTypeIndex,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- VMA_ASSERT(pAllocations != VMA_NULL);
- VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
-
- VmaAllocationCreateInfo finalCreateInfo = createInfo;
-
- // If memory type is not HOST_VISIBLE, disable MAPPED.
- if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
- (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
- {
- finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
- }
- // If memory is lazily allocated, it should be always dedicated.
- if(finalCreateInfo.usage == VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED)
- {
- finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
- }
-
- VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
- VMA_ASSERT(blockVector);
-
- const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
- bool preferDedicatedMemory =
- VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
- dedicatedAllocation ||
- // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size.
- size > preferredBlockSize / 2;
-
- if(preferDedicatedMemory &&
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
- finalCreateInfo.pool == VK_NULL_HANDLE)
- {
- finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
- }
-
- if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
- {
- if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
- {
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- else
- {
- return AllocateDedicatedMemory(
- size,
- suballocType,
- memTypeIndex,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
- finalCreateInfo.pUserData,
- dedicatedBuffer,
- dedicatedImage,
- allocationCount,
- pAllocations);
- }
- }
- else
- {
- VkResult res = blockVector->Allocate(
- m_CurrentFrameIndex.load(),
- size,
- alignment,
- finalCreateInfo,
- suballocType,
- allocationCount,
- pAllocations);
- if(res == VK_SUCCESS)
- {
- return res;
- }
-
- // 5. Try dedicated memory.
- if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
- {
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- else
- {
- res = AllocateDedicatedMemory(
- size,
- suballocType,
- memTypeIndex,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
- (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
- finalCreateInfo.pUserData,
- dedicatedBuffer,
- dedicatedImage,
- allocationCount,
- pAllocations);
- if(res == VK_SUCCESS)
- {
- // Succeeded: AllocateDedicatedMemory function already filld pMemory, nothing more to do here.
- VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
- return VK_SUCCESS;
- }
- else
- {
- // Everything failed: Return error code.
- VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
- return res;
- }
- }
- }
+ VkDeviceSize size,
+ VkDeviceSize alignment,
+ bool dedicatedAllocation,
+ VkBuffer dedicatedBuffer,
+ VkImage dedicatedImage,
+ const VmaAllocationCreateInfo& createInfo,
+ uint32_t memTypeIndex,
+ VmaSuballocationType suballocType,
+ size_t allocationCount,
+ VmaAllocation* pAllocations)
+{
+ VMA_ASSERT(pAllocations != VMA_NULL);
+ VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
+
+ VmaAllocationCreateInfo finalCreateInfo = createInfo;
+
+ // If memory type is not HOST_VISIBLE, disable MAPPED.
+ if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
+ (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+ {
+ finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
+ }
+ // If memory is lazily allocated, it should always be dedicated.
+ if(finalCreateInfo.usage == VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED)
+ {
+ finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
+ }
+
+ VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
+ VMA_ASSERT(blockVector);
+
+ const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
+ bool preferDedicatedMemory =
+ VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
+ dedicatedAllocation ||
+ // Heuristic: Allocate dedicated memory if the requested size is greater than half of the preferred block size.
+ size > preferredBlockSize / 2;
+
+ if(preferDedicatedMemory &&
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
+ finalCreateInfo.pool == VK_NULL_HANDLE)
+ {
+ finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
+ }
+
+ if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
+ {
+ if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
+ {
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ else
+ {
+ return AllocateDedicatedMemory(
+ size,
+ suballocType,
+ memTypeIndex,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
+ finalCreateInfo.pUserData,
+ dedicatedBuffer,
+ dedicatedImage,
+ allocationCount,
+ pAllocations);
+ }
+ }
+ else
+ {
+ VkResult res = blockVector->Allocate(
+ m_CurrentFrameIndex.load(),
+ size,
+ alignment,
+ finalCreateInfo,
+ suballocType,
+ allocationCount,
+ pAllocations);
+ if(res == VK_SUCCESS)
+ {
+ return res;
+ }
+
+ // Try dedicated memory.
+ if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
+ {
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ else
+ {
+ res = AllocateDedicatedMemory(
+ size,
+ suballocType,
+ memTypeIndex,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
+ (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
+ finalCreateInfo.pUserData,
+ dedicatedBuffer,
+ dedicatedImage,
+ allocationCount,
+ pAllocations);
+ if(res == VK_SUCCESS)
+ {
+ // Succeeded: AllocateDedicatedMemory already filled pAllocations, nothing more to do here.
+ VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
+ return VK_SUCCESS;
+ }
+ else
+ {
+ // Everything failed: Return error code.
+ VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
+ return res;
+ }
+ }
+ }
}
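The dedicated-memory heuristic above is size-driven: assuming the default 256 MiB preferred block size, any request over 128 MiB (preferredBlockSize / 2) takes the dedicated path even without an explicit flag. A hedged sketch of a user-side call that would trigger it:

    #include "vk_mem_alloc.h"

    // A 200 MiB buffer exceeds half of the (assumed) 256 MiB preferred block size,
    // so AllocateMemoryOfType gives it its own VkDeviceMemory even though
    // VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT is not set.
    VkResult CreateLargeBuffer(VmaAllocator allocator, VkBuffer* outBuf, VmaAllocation* outAlloc)
    {
        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 200ull * 1024 * 1024;
        bufInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

        VmaAllocationCreateInfo allocInfo = {};
        allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
        return vmaCreateBuffer(allocator, &bufInfo, &allocInfo, outBuf, outAlloc, nullptr);
    }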
VkResult VmaAllocator_T::AllocateDedicatedMemory(
- VkDeviceSize size,
- VmaSuballocationType suballocType,
- uint32_t memTypeIndex,
- bool withinBudget,
- bool map,
- bool isUserDataString,
- void* pUserData,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- VMA_ASSERT(allocationCount > 0 && pAllocations);
-
- if(withinBudget)
- {
- const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
- VmaBudget heapBudget = {};
- GetBudget(&heapBudget, heapIndex, 1);
- if(heapBudget.usage + size * allocationCount > heapBudget.budget)
- {
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- }
-
- VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
- allocInfo.memoryTypeIndex = memTypeIndex;
- allocInfo.allocationSize = size;
+ VkDeviceSize size,
+ VmaSuballocationType suballocType,
+ uint32_t memTypeIndex,
+ bool withinBudget,
+ bool map,
+ bool isUserDataString,
+ void* pUserData,
+ VkBuffer dedicatedBuffer,
+ VkImage dedicatedImage,
+ size_t allocationCount,
+ VmaAllocation* pAllocations)
+{
+ VMA_ASSERT(allocationCount > 0 && pAllocations);
+
+ if(withinBudget)
+ {
+ const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+ VmaBudget heapBudget = {};
+ GetBudget(&heapBudget, heapIndex, 1);
+ if(heapBudget.usage + size * allocationCount > heapBudget.budget)
+ {
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ }
+
+ VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
+ allocInfo.memoryTypeIndex = memTypeIndex;
+ allocInfo.allocationSize = size;
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
- if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
- {
- if(dedicatedBuffer != VK_NULL_HANDLE)
- {
- VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
- dedicatedAllocInfo.buffer = dedicatedBuffer;
- allocInfo.pNext = &dedicatedAllocInfo;
- }
- else if(dedicatedImage != VK_NULL_HANDLE)
- {
- dedicatedAllocInfo.image = dedicatedImage;
- allocInfo.pNext = &dedicatedAllocInfo;
- }
- }
+ VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
+ if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+ {
+ if(dedicatedBuffer != VK_NULL_HANDLE)
+ {
+ VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
+ dedicatedAllocInfo.buffer = dedicatedBuffer;
+ allocInfo.pNext = &dedicatedAllocInfo;
+ }
+ else if(dedicatedImage != VK_NULL_HANDLE)
+ {
+ dedicatedAllocInfo.image = dedicatedImage;
+ allocInfo.pNext = &dedicatedAllocInfo;
+ }
+ }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- size_t allocIndex;
- VkResult res = VK_SUCCESS;
- for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- res = AllocateDedicatedMemoryPage(
- size,
- suballocType,
- memTypeIndex,
- allocInfo,
- map,
- isUserDataString,
- pUserData,
- pAllocations + allocIndex);
- if(res != VK_SUCCESS)
- {
- break;
- }
- }
-
- if(res == VK_SUCCESS)
- {
- // Register them in m_pDedicatedAllocations.
- {
- VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
- AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
- VMA_ASSERT(pDedicatedAllocations);
- for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
- }
- }
-
- VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
- }
- else
- {
- // Free all already created allocations.
- while(allocIndex--)
- {
- VmaAllocation currAlloc = pAllocations[allocIndex];
- VkDeviceMemory hMemory = currAlloc->GetMemory();
-
- /*
- There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory
- before vkFreeMemory.
-
- if(currAlloc->GetMappedData() != VMA_NULL)
- {
- (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
- }
- */
-
- FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
- m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
- currAlloc->SetUserData(this, VMA_NULL);
- m_AllocationObjectAllocator.Free(currAlloc);
- }
-
- memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
- }
-
- return res;
+ size_t allocIndex;
+ VkResult res = VK_SUCCESS;
+ for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+ {
+ res = AllocateDedicatedMemoryPage(
+ size,
+ suballocType,
+ memTypeIndex,
+ allocInfo,
+ map,
+ isUserDataString,
+ pUserData,
+ pAllocations + allocIndex);
+ if(res != VK_SUCCESS)
+ {
+ break;
+ }
+ }
+
+ if(res == VK_SUCCESS)
+ {
+ // Register them in m_pDedicatedAllocations.
+ {
+ VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+ AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
+ VMA_ASSERT(pDedicatedAllocations);
+ for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+ {
+ VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
+ }
+ }
+
+ VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
+ }
+ else
+ {
+ // Free all already created allocations.
+ while(allocIndex--)
+ {
+ VmaAllocation currAlloc = pAllocations[allocIndex];
+ VkDeviceMemory hMemory = currAlloc->GetMemory();
+
+ /*
+ There is no need to call this, because the Vulkan spec allows skipping vkUnmapMemory
+ before vkFreeMemory.
+
+ if(currAlloc->GetMappedData() != VMA_NULL)
+ {
+ (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
+ }
+ */
+
+ FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
+ m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
+ currAlloc->SetUserData(this, VMA_NULL);
+ currAlloc->Dtor();
+ m_AllocationObjectAllocator.Free(currAlloc);
+ }
+
+ memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+ }
+
+ return res;
}
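In raw Vulkan terms, the pNext chain built above looks like the following sketch, written with the core 1.1 spellings of the KHR structs used in this file; it ties one VkDeviceMemory to exactly one buffer (or one image, never both, as the VMA_ASSERT above enforces):

    #include <vulkan/vulkan.h>

    VkResult AllocateDedicatedForBuffer(VkDevice device, VkBuffer buffer,
                                        VkDeviceSize size, uint32_t memTypeIndex,
                                        VkDeviceMemory* outMemory)
    {
        VkMemoryDedicatedAllocateInfo dedicatedInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO };
        dedicatedInfo.buffer = buffer; // or .image — mutually exclusive

        VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
        allocInfo.pNext = &dedicatedInfo;
        allocInfo.allocationSize = size;
        allocInfo.memoryTypeIndex = memTypeIndex;
        return vkAllocateMemory(device, &allocInfo, nullptr, outMemory);
    }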
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
- VkDeviceSize size,
- VmaSuballocationType suballocType,
- uint32_t memTypeIndex,
- const VkMemoryAllocateInfo& allocInfo,
- bool map,
- bool isUserDataString,
- void* pUserData,
- VmaAllocation* pAllocation)
-{
- VkDeviceMemory hMemory = VK_NULL_HANDLE;
- VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
- if(res < 0)
- {
- VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
- return res;
- }
-
- void* pMappedData = VMA_NULL;
- if(map)
- {
- res = (*m_VulkanFunctions.vkMapMemory)(
- m_hDevice,
- hMemory,
- 0,
- VK_WHOLE_SIZE,
- 0,
- &pMappedData);
- if(res < 0)
- {
- VMA_DEBUG_LOG(" vkMapMemory FAILED");
- FreeVulkanMemory(memTypeIndex, size, hMemory);
- return res;
- }
- }
-
- *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
- (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
- (*pAllocation)->SetUserData(this, pUserData);
- m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
- {
- FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
- }
-
- return VK_SUCCESS;
+ VkDeviceSize size,
+ VmaSuballocationType suballocType,
+ uint32_t memTypeIndex,
+ const VkMemoryAllocateInfo& allocInfo,
+ bool map,
+ bool isUserDataString,
+ void* pUserData,
+ VmaAllocation* pAllocation)
+{
+ VkDeviceMemory hMemory = VK_NULL_HANDLE;
+ VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
+ if(res < 0)
+ {
+ VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
+ return res;
+ }
+
+ void* pMappedData = VMA_NULL;
+ if(map)
+ {
+ res = (*m_VulkanFunctions.vkMapMemory)(
+ m_hDevice,
+ hMemory,
+ 0,
+ VK_WHOLE_SIZE,
+ 0,
+ &pMappedData);
+ if(res < 0)
+ {
+ VMA_DEBUG_LOG(" vkMapMemory FAILED");
+ FreeVulkanMemory(memTypeIndex, size, hMemory);
+ return res;
+ }
+ }
+
+ *pAllocation = m_AllocationObjectAllocator.Allocate();
+ (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
+ (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
+ (*pAllocation)->SetUserData(this, pUserData);
+ m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
+ if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+ {
+ FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
+ }
+
+ return VK_SUCCESS;
}
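The `map` path above is what backs persistently mapped allocations on the public API side: VMA_ALLOCATION_CREATE_MAPPED_BIT keeps the memory mapped for the allocation's lifetime and returns the pointer through VmaAllocationInfo::pMappedData. A minimal sketch:

    #include "vk_mem_alloc.h"

    VkResult CreateMappedStagingBuffer(VmaAllocator allocator, VkDeviceSize size,
                                       VkBuffer* outBuf, VmaAllocation* outAlloc, void** outPtr)
    {
        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = size;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT; // takes the vkMapMemory branch above

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(allocator, &bufInfo, &allocCreateInfo,
                                       outBuf, outAlloc, &allocInfo);
        if(res == VK_SUCCESS)
            *outPtr = allocInfo.pMappedData; // valid until the allocation is destroyed
        return res;
    }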
void VmaAllocator_T::GetBufferMemoryRequirements(
- VkBuffer hBuffer,
- VkMemoryRequirements& memReq,
- bool& requiresDedicatedAllocation,
- bool& prefersDedicatedAllocation) const
+ VkBuffer hBuffer,
+ VkMemoryRequirements& memReq,
+ bool& requiresDedicatedAllocation,
+ bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
- {
- VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
- memReqInfo.buffer = hBuffer;
+ if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+ {
+ VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
+ memReqInfo.buffer = hBuffer;
- VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
+ VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
- VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
- memReq2.pNext = &memDedicatedReq;
+ VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
+ memReq2.pNext = &memDedicatedReq;
- (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
+ (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
- memReq = memReq2.memoryRequirements;
- requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
- prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
- }
- else
+ memReq = memReq2.memoryRequirements;
+ requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
+ prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
+ }
+ else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- {
- (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
- requiresDedicatedAllocation = false;
- prefersDedicatedAllocation = false;
- }
+ {
+ (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
+ requiresDedicatedAllocation = false;
+ prefersDedicatedAllocation = false;
+ }
}
void VmaAllocator_T::GetImageMemoryRequirements(
- VkImage hImage,
- VkMemoryRequirements& memReq,
- bool& requiresDedicatedAllocation,
- bool& prefersDedicatedAllocation) const
+ VkImage hImage,
+ VkMemoryRequirements& memReq,
+ bool& requiresDedicatedAllocation,
+ bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
- {
- VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
- memReqInfo.image = hImage;
+ if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
+ {
+ VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
+ memReqInfo.image = hImage;
- VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
+ VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
- VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
- memReq2.pNext = &memDedicatedReq;
+ VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
+ memReq2.pNext = &memDedicatedReq;
- (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
+ (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
- memReq = memReq2.memoryRequirements;
- requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
- prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
- }
- else
+ memReq = memReq2.memoryRequirements;
+ requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
+ prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
+ }
+ else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
- {
- (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
- requiresDedicatedAllocation = false;
- prefersDedicatedAllocation = false;
- }
+ {
+ (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
+ requiresDedicatedAllocation = false;
+ prefersDedicatedAllocation = false;
+ }
}
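The same query GetImageMemoryRequirements performs above, sketched in core Vulkan 1.1 spelling: chaining VkMemoryDedicatedRequirements reveals whether the driver requires or merely prefers a dedicated allocation for the image.

    #include <vulkan/vulkan.h>

    void QueryImageDedicatedRequirements(VkDevice device, VkImage image,
                                         VkMemoryRequirements* outReq,
                                         bool* outRequires, bool* outPrefers)
    {
        VkImageMemoryRequirementsInfo2 info = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 };
        info.image = image;

        VkMemoryDedicatedRequirements dedicated = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS };
        VkMemoryRequirements2 req2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
        req2.pNext = &dedicated; // output struct chained onto the query

        vkGetImageMemoryRequirements2(device, &info, &req2);

        *outReq = req2.memoryRequirements;
        *outRequires = dedicated.requiresDedicatedAllocation != VK_FALSE;
        *outPrefers = dedicated.prefersDedicatedAllocation != VK_FALSE;
    }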
VkResult VmaAllocator_T::AllocateMemory(
- const VkMemoryRequirements& vkMemReq,
- bool requiresDedicatedAllocation,
- bool prefersDedicatedAllocation,
- VkBuffer dedicatedBuffer,
- VkImage dedicatedImage,
- const VmaAllocationCreateInfo& createInfo,
- VmaSuballocationType suballocType,
- size_t allocationCount,
- VmaAllocation* pAllocations)
-{
- memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
-
- VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
-
- if(vkMemReq.size == 0)
- {
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
- (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
- {
- VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
- (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
- {
- VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- if(requiresDedicatedAllocation)
- {
- if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
- {
- VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- if(createInfo.pool != VK_NULL_HANDLE)
- {
- VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- }
- if((createInfo.pool != VK_NULL_HANDLE) &&
- ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
- {
- VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
-
- if(createInfo.pool != VK_NULL_HANDLE)
- {
- const VkDeviceSize alignmentForPool = VMA_MAX(
- vkMemReq.alignment,
- GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
-
- VmaAllocationCreateInfo createInfoForPool = createInfo;
- // If memory type is not HOST_VISIBLE, disable MAPPED.
- if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
- (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
- {
- createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
- }
-
- return createInfo.pool->m_BlockVector.Allocate(
- m_CurrentFrameIndex.load(),
- vkMemReq.size,
- alignmentForPool,
- createInfoForPool,
- suballocType,
- allocationCount,
- pAllocations);
- }
- else
- {
- // Bit mask of memory Vulkan types acceptable for this allocation.
- uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
- uint32_t memTypeIndex = UINT32_MAX;
- VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
- if(res == VK_SUCCESS)
- {
- VkDeviceSize alignmentForMemType = VMA_MAX(
- vkMemReq.alignment,
- GetMemoryTypeMinAlignment(memTypeIndex));
-
- res = AllocateMemoryOfType(
- vkMemReq.size,
- alignmentForMemType,
- requiresDedicatedAllocation || prefersDedicatedAllocation,
- dedicatedBuffer,
- dedicatedImage,
- createInfo,
- memTypeIndex,
- suballocType,
- allocationCount,
- pAllocations);
- // Succeeded on first try.
- if(res == VK_SUCCESS)
- {
- return res;
- }
- // Allocation from this memory type failed. Try other compatible memory types.
- else
- {
- for(;;)
- {
- // Remove old memTypeIndex from list of possibilities.
- memoryTypeBits &= ~(1u << memTypeIndex);
- // Find alternative memTypeIndex.
- res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
- if(res == VK_SUCCESS)
- {
- alignmentForMemType = VMA_MAX(
- vkMemReq.alignment,
- GetMemoryTypeMinAlignment(memTypeIndex));
-
- res = AllocateMemoryOfType(
- vkMemReq.size,
- alignmentForMemType,
- requiresDedicatedAllocation || prefersDedicatedAllocation,
- dedicatedBuffer,
- dedicatedImage,
- createInfo,
- memTypeIndex,
- suballocType,
- allocationCount,
- pAllocations);
- // Allocation from this alternative memory type succeeded.
- if(res == VK_SUCCESS)
- {
- return res;
- }
- // else: Allocation from this memory type failed. Try next one - next loop iteration.
- }
- // No other matching memory type index could be found.
- else
- {
- // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- }
- }
- }
- // Can't find any single memory type maching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
- else
- return res;
- }
+ const VkMemoryRequirements& vkMemReq,
+ bool requiresDedicatedAllocation,
+ bool prefersDedicatedAllocation,
+ VkBuffer dedicatedBuffer,
+ VkImage dedicatedImage,
+ const VmaAllocationCreateInfo& createInfo,
+ VmaSuballocationType suballocType,
+ size_t allocationCount,
+ VmaAllocation* pAllocations)
+{
+ memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
+
+ VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
+
+ if(vkMemReq.size == 0)
+ {
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
+ (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
+ {
+ VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
+ (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
+ {
+ VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ if(requiresDedicatedAllocation)
+ {
+ if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
+ {
+ VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ if(createInfo.pool != VK_NULL_HANDLE)
+ {
+ VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ }
+ if((createInfo.pool != VK_NULL_HANDLE) &&
+ ((createInfo.flags & (VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT)) != 0))
+ {
+ VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+
+ if(createInfo.pool != VK_NULL_HANDLE)
+ {
+ const VkDeviceSize alignmentForPool = VMA_MAX(
+ vkMemReq.alignment,
+ GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
+
+ VmaAllocationCreateInfo createInfoForPool = createInfo;
+ // If memory type is not HOST_VISIBLE, disable MAPPED.
+ if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
+ (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+ {
+ createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
+ }
+
+ return createInfo.pool->m_BlockVector.Allocate(
+ m_CurrentFrameIndex.load(),
+ vkMemReq.size,
+ alignmentForPool,
+ createInfoForPool,
+ suballocType,
+ allocationCount,
+ pAllocations);
+ }
+ else
+ {
+        // Bit mask of Vulkan memory types acceptable for this allocation.
+ uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
+ uint32_t memTypeIndex = UINT32_MAX;
+ VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
+ if(res == VK_SUCCESS)
+ {
+ VkDeviceSize alignmentForMemType = VMA_MAX(
+ vkMemReq.alignment,
+ GetMemoryTypeMinAlignment(memTypeIndex));
+
+ res = AllocateMemoryOfType(
+ vkMemReq.size,
+ alignmentForMemType,
+ requiresDedicatedAllocation || prefersDedicatedAllocation,
+ dedicatedBuffer,
+ dedicatedImage,
+ createInfo,
+ memTypeIndex,
+ suballocType,
+ allocationCount,
+ pAllocations);
+ // Succeeded on first try.
+ if(res == VK_SUCCESS)
+ {
+ return res;
+ }
+ // Allocation from this memory type failed. Try other compatible memory types.
+ else
+ {
+ for(;;)
+ {
+ // Remove old memTypeIndex from list of possibilities.
+ memoryTypeBits &= ~(1u << memTypeIndex);
+ // Find alternative memTypeIndex.
+ res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
+ if(res == VK_SUCCESS)
+ {
+ alignmentForMemType = VMA_MAX(
+ vkMemReq.alignment,
+ GetMemoryTypeMinAlignment(memTypeIndex));
+
+ res = AllocateMemoryOfType(
+ vkMemReq.size,
+ alignmentForMemType,
+ requiresDedicatedAllocation || prefersDedicatedAllocation,
+ dedicatedBuffer,
+ dedicatedImage,
+ createInfo,
+ memTypeIndex,
+ suballocType,
+ allocationCount,
+ pAllocations);
+ // Allocation from this alternative memory type succeeded.
+ if(res == VK_SUCCESS)
+ {
+ return res;
+ }
+ // else: Allocation from this memory type failed. Try next one - next loop iteration.
+ }
+ // No other matching memory type index could be found.
+ else
+ {
+ // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ }
+ }
+ }
+        // Can't find any single memory type matching requirements. res is VK_ERROR_FEATURE_NOT_PRESENT.
+ else
+ return res;
+ }
}
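The retry loop above is worth seeing from the caller's side. Below is a minimal sketch of the same "clear the failed bit and retry" pattern expressed against the public API; `allocator`, `memReq`, and `allocCreateInfo` are assumed to exist in the caller, and `TryAllocateFromType` is a hypothetical helper standing in for AllocateMemoryOfType.

uint32_t memoryTypeBits = memReq.memoryTypeBits; // from vkGet*MemoryRequirements
uint32_t memTypeIndex = UINT32_MAX;
while(vmaFindMemoryTypeIndex(allocator, memoryTypeBits, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS)
{
    if(TryAllocateFromType(memTypeIndex) == VK_SUCCESS) // hypothetical helper
        break;
    memoryTypeBits &= ~(1u << memTypeIndex); // exclude the failed type and retry
}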
void VmaAllocator_T::FreeMemory(
- size_t allocationCount,
- const VmaAllocation* pAllocations)
-{
- VMA_ASSERT(pAllocations);
-
- for(size_t allocIndex = allocationCount; allocIndex--; )
- {
- VmaAllocation allocation = pAllocations[allocIndex];
-
- if(allocation != VK_NULL_HANDLE)
- {
- if(TouchAllocation(allocation))
- {
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
- {
- FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
- }
-
- switch(allocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaBlockVector* pBlockVector = VMA_NULL;
- VmaPool hPool = allocation->GetBlock()->GetParentPool();
- if(hPool != VK_NULL_HANDLE)
- {
- pBlockVector = &hPool->m_BlockVector;
- }
- else
- {
- const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
- pBlockVector = m_pBlockVectors[memTypeIndex];
- }
- pBlockVector->Free(allocation);
- }
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- FreeDedicatedMemory(allocation);
- break;
- default:
- VMA_ASSERT(0);
- }
- }
-
-            // Do this regardless of whether the allocation is lost. Lost allocations still count toward Budget.AllocationBytes.
- m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
- allocation->SetUserData(this, VMA_NULL);
- m_AllocationObjectAllocator.Free(allocation);
- }
- }
+ size_t allocationCount,
+ const VmaAllocation* pAllocations)
+{
+ VMA_ASSERT(pAllocations);
+
+ for(size_t allocIndex = allocationCount; allocIndex--; )
+ {
+ VmaAllocation allocation = pAllocations[allocIndex];
+
+ if(allocation != VK_NULL_HANDLE)
+ {
+ if(TouchAllocation(allocation))
+ {
+ if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
+ {
+ FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
+ }
+
+ switch(allocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaBlockVector* pBlockVector = VMA_NULL;
+ VmaPool hPool = allocation->GetBlock()->GetParentPool();
+ if(hPool != VK_NULL_HANDLE)
+ {
+ pBlockVector = &hPool->m_BlockVector;
+ }
+ else
+ {
+ const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
+ pBlockVector = m_pBlockVectors[memTypeIndex];
+ }
+ pBlockVector->Free(allocation);
+ }
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ FreeDedicatedMemory(allocation);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+ }
+
+            // Do this regardless of whether the allocation is lost. Lost allocations still count toward Budget.AllocationBytes.
+ m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
+ allocation->SetUserData(this, VMA_NULL);
+ allocation->Dtor();
+ m_AllocationObjectAllocator.Free(allocation);
+ }
+ }
}
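FreeMemory tolerates VK_NULL_HANDLE entries and walks the array in reverse. A hedged sketch of reaching it through the public batch API, assuming the array was filled earlier (e.g. by vmaAllocateMemoryPages):

VmaAllocation pages[3] = { pageA, VK_NULL_HANDLE, pageC }; // pageA/pageC assumed
vmaFreeMemoryPages(allocator, 3, pages); // null entries are skipped, as above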
VkResult VmaAllocator_T::ResizeAllocation(
- const VmaAllocation alloc,
- VkDeviceSize newSize)
+ const VmaAllocation alloc,
+ VkDeviceSize newSize)
{
- // This function is deprecated and so it does nothing. It's left for backward compatibility.
- if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
- {
- return VK_ERROR_VALIDATION_FAILED_EXT;
- }
- if(newSize == alloc->GetSize())
- {
- return VK_SUCCESS;
- }
- return VK_ERROR_OUT_OF_POOL_MEMORY;
+ // This function is deprecated and so it does nothing. It's left for backward compatibility.
+ if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
+ {
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+ if(newSize == alloc->GetSize())
+ {
+ return VK_SUCCESS;
+ }
+ return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
- // Initialize.
- InitStatInfo(pStats->total);
- for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
- InitStatInfo(pStats->memoryType[i]);
- for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
- InitStatInfo(pStats->memoryHeap[i]);
-
- // Process default pools.
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
- VMA_ASSERT(pBlockVector);
- pBlockVector->AddStats(pStats);
- }
-
- // Process custom pools.
- {
- VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
- for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
- {
- m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
- }
- }
-
- // Process dedicated allocations.
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
- VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
- AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
- VMA_ASSERT(pDedicatedAllocVector);
- for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
- {
- VmaStatInfo allocationStatInfo;
- (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
- VmaAddStatInfo(pStats->total, allocationStatInfo);
- VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
- VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
- }
- }
-
- // Postprocess.
- VmaPostprocessCalcStatInfo(pStats->total);
- for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
- VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
- for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
- VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
+ // Initialize.
+ InitStatInfo(pStats->total);
+ for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
+ InitStatInfo(pStats->memoryType[i]);
+ for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
+ InitStatInfo(pStats->memoryHeap[i]);
+
+ // Process default pools.
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+ {
+ VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
+ VMA_ASSERT(pBlockVector);
+ pBlockVector->AddStats(pStats);
+ }
+
+ // Process custom pools.
+ {
+ VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+ for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
+ {
+ m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
+ }
+ }
+
+ // Process dedicated allocations.
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+ {
+ const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
+ VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+ AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
+ VMA_ASSERT(pDedicatedAllocVector);
+ for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
+ {
+ VmaStatInfo allocationStatInfo;
+ (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
+ VmaAddStatInfo(pStats->total, allocationStatInfo);
+ VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
+ VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
+ }
+ }
+
+ // Postprocess.
+ VmaPostprocessCalcStatInfo(pStats->total);
+ for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
+ VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
+ for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
+ VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
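A hedged usage sketch of the public entry point for the aggregation above, assuming a valid `allocator`; `total` already folds in every default pool, custom pool, and dedicated allocation:

#include <cstdio>

VmaStats stats;
vmaCalculateStats(allocator, &stats);
printf("used %llu B in %u allocations, %llu B unused\n",
    (unsigned long long)stats.total.usedBytes,
    stats.total.allocationCount,
    (unsigned long long)stats.total.unusedBytes);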
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
- if(m_UseExtMemoryBudget)
- {
- if(m_Budget.m_OperationsSinceBudgetFetch < 30)
- {
- VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
- for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
- {
- const uint32_t heapIndex = firstHeap + i;
-
- outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
- outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];
-
- if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
- {
- outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
- outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
- }
- else
- {
- outBudget->usage = 0;
- }
-
- // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
- outBudget->budget = VMA_MIN(
- m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
- }
- }
- else
- {
- UpdateVulkanBudget(); // Outside of mutex lock
- GetBudget(outBudget, firstHeap, heapCount); // Recursion
- }
- }
- else
+ if(m_UseExtMemoryBudget)
+ {
+ if(m_Budget.m_OperationsSinceBudgetFetch < 30)
+ {
+ VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
+ for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
+ {
+ const uint32_t heapIndex = firstHeap + i;
+
+ outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
+ outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];
+
+ if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
+ {
+ outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
+ outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
+ }
+ else
+ {
+ outBudget->usage = 0;
+ }
+
+ // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
+ outBudget->budget = VMA_MIN(
+ m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
+ }
+ }
+ else
+ {
+ UpdateVulkanBudget(); // Outside of mutex lock
+ GetBudget(outBudget, firstHeap, heapCount); // Recursion
+ }
+ }
+ else
#endif
- {
- for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
- {
- const uint32_t heapIndex = firstHeap + i;
+ {
+ for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
+ {
+ const uint32_t heapIndex = firstHeap + i;
- outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
- outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];
+ outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
+ outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];
- outBudget->usage = outBudget->blockBytes;
-            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
- }
- }
+ outBudget->usage = outBudget->blockBytes;
+            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
+ }
+ }
}
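In the budgeted path above, usage is extrapolated as the last OS-reported usage adjusted by the bytes this allocator allocated or freed since the fetch (blockBytes minus blockBytesAtBudgetFetch), clamped at zero. A hedged sketch of consuming it, assuming a valid `allocator`:

VmaBudget budgets[VK_MAX_MEMORY_HEAPS] = {};
vmaGetBudget(allocator, budgets); // fills one entry per memory heap
if(budgets[0].usage >= budgets[0].budget)
{
    // Heap 0 is over budget: prefer freeing resources or deferring new ones.
}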
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
- const VmaDefragmentationInfo2& info,
- VmaDefragmentationStats* pStats,
- VmaDefragmentationContext* pContext)
+ const VmaDefragmentationInfo2& info,
+ VmaDefragmentationStats* pStats,
+ VmaDefragmentationContext* pContext)
{
- if(info.pAllocationsChanged != VMA_NULL)
- {
- memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
- }
+ if(info.pAllocationsChanged != VMA_NULL)
+ {
+ memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
+ }
- *pContext = vma_new(this, VmaDefragmentationContext_T)(
- this, m_CurrentFrameIndex.load(), info.flags, pStats);
+ *pContext = vma_new(this, VmaDefragmentationContext_T)(
+ this, m_CurrentFrameIndex.load(), info.flags, pStats);
- (*pContext)->AddPools(info.poolCount, info.pPools);
- (*pContext)->AddAllocations(
- info.allocationCount, info.pAllocations, info.pAllocationsChanged);
+ (*pContext)->AddPools(info.poolCount, info.pPools);
+ (*pContext)->AddAllocations(
+ info.allocationCount, info.pAllocations, info.pAllocationsChanged);
- VkResult res = (*pContext)->Defragment(
- info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
- info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
- info.commandBuffer, pStats, info.flags);
+ VkResult res = (*pContext)->Defragment(
+ info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
+ info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
+ info.commandBuffer, pStats);
- if(res != VK_NOT_READY)
- {
- vma_delete(this, *pContext);
- *pContext = VMA_NULL;
- }
+ if(res != VK_NOT_READY)
+ {
+ vma_delete(this, *pContext);
+ *pContext = VMA_NULL;
+ }
- return res;
+ return res;
}
VkResult VmaAllocator_T::DefragmentationEnd(
- VmaDefragmentationContext context)
-{
- vma_delete(this, context);
- return VK_SUCCESS;
-}
-
-VkResult VmaAllocator_T::DefragmentationPassBegin(
- VmaDefragmentationPassInfo* pInfo,
- VmaDefragmentationContext context)
-{
- return context->DefragmentPassBegin(pInfo);
-}
-VkResult VmaAllocator_T::DefragmentationPassEnd(
- VmaDefragmentationContext context)
+ VmaDefragmentationContext context)
{
- return context->DefragmentPassEnd();
-
+ vma_delete(this, context);
+ return VK_SUCCESS;
}
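A hedged sketch of driving the two functions above for a CPU-only pass; `allocs` and `changed` are assumed arrays of VmaAllocation / VkBool32 and `allocCount` an assumed uint32_t. With no command buffer, DefragmentationBegin completes (and frees the context) immediately unless it returns VK_NOT_READY:

VmaDefragmentationInfo2 defragInfo = {};
defragInfo.allocationCount = allocCount;
defragInfo.pAllocations = allocs;
defragInfo.pAllocationsChanged = changed;
defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
vmaDefragmentationEnd(allocator, defragCtx); // no-op when the context is null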
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
- if(hAllocation->CanBecomeLost())
- {
- /*
- Warning: This is a carefully designed algorithm.
- Do not modify unless you really know what you're doing :)
- */
- const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
- uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
- for(;;)
- {
- if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
- {
- pAllocationInfo->memoryType = UINT32_MAX;
- pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
- pAllocationInfo->offset = 0;
- pAllocationInfo->size = hAllocation->GetSize();
- pAllocationInfo->pMappedData = VMA_NULL;
- pAllocationInfo->pUserData = hAllocation->GetUserData();
- return;
- }
- else if(localLastUseFrameIndex == localCurrFrameIndex)
- {
- pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
- pAllocationInfo->deviceMemory = hAllocation->GetMemory();
- pAllocationInfo->offset = hAllocation->GetOffset();
- pAllocationInfo->size = hAllocation->GetSize();
- pAllocationInfo->pMappedData = VMA_NULL;
- pAllocationInfo->pUserData = hAllocation->GetUserData();
- return;
- }
- else // Last use time earlier than current time.
- {
- if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
- {
- localLastUseFrameIndex = localCurrFrameIndex;
- }
- }
- }
- }
- else
- {
+ if(hAllocation->CanBecomeLost())
+ {
+ /*
+ Warning: This is a carefully designed algorithm.
+ Do not modify unless you really know what you're doing :)
+ */
+ const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+ uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+ for(;;)
+ {
+ if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+ {
+ pAllocationInfo->memoryType = UINT32_MAX;
+ pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
+ pAllocationInfo->offset = 0;
+ pAllocationInfo->size = hAllocation->GetSize();
+ pAllocationInfo->pMappedData = VMA_NULL;
+ pAllocationInfo->pUserData = hAllocation->GetUserData();
+ return;
+ }
+ else if(localLastUseFrameIndex == localCurrFrameIndex)
+ {
+ pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
+ pAllocationInfo->deviceMemory = hAllocation->GetMemory();
+ pAllocationInfo->offset = hAllocation->GetOffset();
+ pAllocationInfo->size = hAllocation->GetSize();
+ pAllocationInfo->pMappedData = VMA_NULL;
+ pAllocationInfo->pUserData = hAllocation->GetUserData();
+ return;
+ }
+ else // Last use time earlier than current time.
+ {
+ if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+ {
+ localLastUseFrameIndex = localCurrFrameIndex;
+ }
+ }
+ }
+ }
+ else
+ {
#if VMA_STATS_STRING_ENABLED
- uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
- uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
- for(;;)
- {
- VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
- if(localLastUseFrameIndex == localCurrFrameIndex)
- {
- break;
- }
- else // Last use time earlier than current time.
- {
- if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
- {
- localLastUseFrameIndex = localCurrFrameIndex;
- }
- }
- }
+ uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+ uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+ for(;;)
+ {
+ VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
+ if(localLastUseFrameIndex == localCurrFrameIndex)
+ {
+ break;
+ }
+ else // Last use time earlier than current time.
+ {
+ if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+ {
+ localLastUseFrameIndex = localCurrFrameIndex;
+ }
+ }
+ }
#endif
- pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
- pAllocationInfo->deviceMemory = hAllocation->GetMemory();
- pAllocationInfo->offset = hAllocation->GetOffset();
- pAllocationInfo->size = hAllocation->GetSize();
- pAllocationInfo->pMappedData = hAllocation->GetMappedData();
- pAllocationInfo->pUserData = hAllocation->GetUserData();
- }
+ pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
+ pAllocationInfo->deviceMemory = hAllocation->GetMemory();
+ pAllocationInfo->offset = hAllocation->GetOffset();
+ pAllocationInfo->size = hAllocation->GetSize();
+ pAllocationInfo->pMappedData = hAllocation->GetMappedData();
+ pAllocationInfo->pUserData = hAllocation->GetUserData();
+ }
}
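The lock-free loop above is the classic compare-exchange stamping pattern: re-read until we either observe the sentinel or successfully advance the stamp. A standalone sketch of the same idea, with illustrative names and without the LOST-sentinel check VMA performs first:

#include <atomic>
#include <cstdint>

void StampCurrentFrame(std::atomic<uint32_t>& lastUseFrame, uint32_t currFrame)
{
    uint32_t observed = lastUseFrame.load();
    for(;;)
    {
        if(observed == currFrame)
            return; // already stamped, by us or a racing thread
        if(lastUseFrame.compare_exchange_weak(observed, currFrame))
            return; // we advanced the stamp
        // On failure, compare_exchange refreshed `observed`; loop and re-check.
    }
}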
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
- // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
- if(hAllocation->CanBecomeLost())
- {
- uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
- uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
- for(;;)
- {
- if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
- {
- return false;
- }
- else if(localLastUseFrameIndex == localCurrFrameIndex)
- {
- return true;
- }
- else // Last use time earlier than current time.
- {
- if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
- {
- localLastUseFrameIndex = localCurrFrameIndex;
- }
- }
- }
- }
- else
- {
+ // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
+ if(hAllocation->CanBecomeLost())
+ {
+ uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+ uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+ for(;;)
+ {
+ if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
+ {
+ return false;
+ }
+ else if(localLastUseFrameIndex == localCurrFrameIndex)
+ {
+ return true;
+ }
+ else // Last use time earlier than current time.
+ {
+ if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+ {
+ localLastUseFrameIndex = localCurrFrameIndex;
+ }
+ }
+ }
+ }
+ else
+ {
#if VMA_STATS_STRING_ENABLED
- uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
- uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
- for(;;)
- {
- VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
- if(localLastUseFrameIndex == localCurrFrameIndex)
- {
- break;
- }
- else // Last use time earlier than current time.
- {
- if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
- {
- localLastUseFrameIndex = localCurrFrameIndex;
- }
- }
- }
+ uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
+ uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
+ for(;;)
+ {
+ VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
+ if(localLastUseFrameIndex == localCurrFrameIndex)
+ {
+ break;
+ }
+ else // Last use time earlier than current time.
+ {
+ if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
+ {
+ localLastUseFrameIndex = localCurrFrameIndex;
+ }
+ }
+ }
#endif
- return true;
- }
+ return true;
+ }
}
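A hedged sketch of the per-frame protocol these functions exist for, assuming lost allocations are enabled for the allocation and that `frameIndex` and `alloc` are supplied by the caller:

vmaSetCurrentFrameIndex(allocator, frameIndex); // once per frame
if(vmaTouchAllocation(allocator, alloc) == VK_FALSE)
{
    // The allocation became lost: recreate the resource before using it.
}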
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
- VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
+ VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
- VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
+ VmaPoolCreateInfo newCreateInfo = *pCreateInfo;
- if(newCreateInfo.maxBlockCount == 0)
- {
- newCreateInfo.maxBlockCount = SIZE_MAX;
- }
- if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
- {
- return VK_ERROR_INITIALIZATION_FAILED;
- }
- // Memory type index out of range or forbidden.
- if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
- ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
- {
- return VK_ERROR_FEATURE_NOT_PRESENT;
- }
+ if(newCreateInfo.maxBlockCount == 0)
+ {
+ newCreateInfo.maxBlockCount = SIZE_MAX;
+ }
+ if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
+ {
+ return VK_ERROR_INITIALIZATION_FAILED;
+ }
- const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
+ const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
- *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);
+ *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);
- VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
- if(res != VK_SUCCESS)
- {
- vma_delete(this, *pPool);
- *pPool = VMA_NULL;
- return res;
- }
+ VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
+ if(res != VK_SUCCESS)
+ {
+ vma_delete(this, *pPool);
+ *pPool = VMA_NULL;
+ return res;
+ }
- // Add to m_Pools.
- {
- VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
- (*pPool)->SetId(m_NextPoolId++);
- VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
- }
+ // Add to m_Pools.
+ {
+ VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
+ (*pPool)->SetId(m_NextPoolId++);
+ VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
+ }
- return VK_SUCCESS;
+ return VK_SUCCESS;
}
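A hedged sketch of creating a custom pool through the public wrapper, with `memTypeIndex` assumed to come from vmaFindMemoryTypeIndex. Note that maxBlockCount == 0 is promoted to SIZE_MAX above, i.e. unbounded:

VmaPoolCreateInfo poolInfo = {};
poolInfo.memoryTypeIndex = memTypeIndex;
poolInfo.blockSize = 16ull * 1024 * 1024; // 16 MiB per block
poolInfo.minBlockCount = 1;               // preallocated by CreateMinBlocks()
poolInfo.maxBlockCount = 0;               // 0 -> SIZE_MAX (unbounded)

VmaPool pool = VK_NULL_HANDLE;
if(vmaCreatePool(allocator, &poolInfo, &pool) == VK_SUCCESS)
{
    // ... allocate with VmaAllocationCreateInfo::pool = pool ...
    vmaDestroyPool(allocator, pool);
}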
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
- // Remove from m_Pools.
- {
- VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
- bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
- VMA_ASSERT(success && "Pool not found in Allocator.");
- }
+ // Remove from m_Pools.
+ {
+ VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
+ bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
+ VMA_ASSERT(success && "Pool not found in Allocator.");
+ }
- vma_delete(this, pool);
+ vma_delete(this, pool);
}
void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
- pool->m_BlockVector.GetPoolStats(pPoolStats);
+ pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
- m_CurrentFrameIndex.store(frameIndex);
+ m_CurrentFrameIndex.store(frameIndex);
#if VMA_MEMORY_BUDGET
- if(m_UseExtMemoryBudget)
- {
- UpdateVulkanBudget();
- }
+ if(m_UseExtMemoryBudget)
+ {
+ UpdateVulkanBudget();
+ }
#endif // #if VMA_MEMORY_BUDGET
}
void VmaAllocator_T::MakePoolAllocationsLost(
- VmaPool hPool,
- size_t* pLostAllocationCount)
+ VmaPool hPool,
+ size_t* pLostAllocationCount)
{
- hPool->m_BlockVector.MakePoolAllocationsLost(
- m_CurrentFrameIndex.load(),
- pLostAllocationCount);
+ hPool->m_BlockVector.MakePoolAllocationsLost(
+ m_CurrentFrameIndex.load(),
+ pLostAllocationCount);
}
VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
- return hPool->m_BlockVector.CheckCorruption();
+ return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
- VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
-
- // Process default pools.
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- if(((1u << memTypeIndex) & memoryTypeBits) != 0)
- {
- VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
- VMA_ASSERT(pBlockVector);
- VkResult localRes = pBlockVector->CheckCorruption();
- switch(localRes)
- {
- case VK_ERROR_FEATURE_NOT_PRESENT:
- break;
- case VK_SUCCESS:
- finalRes = VK_SUCCESS;
- break;
- default:
- return localRes;
- }
- }
- }
-
- // Process custom pools.
- {
- VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
- for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
- {
- if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
- {
- VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
- switch(localRes)
- {
- case VK_ERROR_FEATURE_NOT_PRESENT:
- break;
- case VK_SUCCESS:
- finalRes = VK_SUCCESS;
- break;
- default:
- return localRes;
- }
- }
- }
- }
-
- return finalRes;
+ VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
+
+ // Process default pools.
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+ {
+ if(((1u << memTypeIndex) & memoryTypeBits) != 0)
+ {
+ VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
+ VMA_ASSERT(pBlockVector);
+ VkResult localRes = pBlockVector->CheckCorruption();
+ switch(localRes)
+ {
+ case VK_ERROR_FEATURE_NOT_PRESENT:
+ break;
+ case VK_SUCCESS:
+ finalRes = VK_SUCCESS;
+ break;
+ default:
+ return localRes;
+ }
+ }
+ }
+
+ // Process custom pools.
+ {
+ VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+ for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
+ {
+ if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
+ {
+ VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
+ switch(localRes)
+ {
+ case VK_ERROR_FEATURE_NOT_PRESENT:
+ break;
+ case VK_SUCCESS:
+ finalRes = VK_SUCCESS;
+ break;
+ default:
+ return localRes;
+ }
+ }
+ }
+ }
+
+ return finalRes;
}
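The return-code semantics above are easy to misread: VK_ERROR_FEATURE_NOT_PRESENT means nothing was checked (corruption detection was not enabled for any matching block vector), not that corruption was found. A hedged sketch:

VkResult res = vmaCheckCorruption(allocator, UINT32_MAX); // check all memory types
if(res == VK_SUCCESS)
{
    // All checkable blocks validated clean.
}
else if(res != VK_ERROR_FEATURE_NOT_PRESENT)
{
    // Corruption (or a map failure) was detected around some allocation.
}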
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
- *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
- (*pAllocation)->InitLost();
+ *pAllocation = m_AllocationObjectAllocator.Allocate();
+ (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST, false);
+ (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
- const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
-
- // HeapSizeLimit is in effect for this heap.
- if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
- {
- const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
- VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
- for(;;)
- {
- const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
- if(blockBytesAfterAllocation > heapSize)
- {
- return VK_ERROR_OUT_OF_DEVICE_MEMORY;
- }
- if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
- {
- break;
- }
- }
- }
- else
- {
- m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
- }
-
- // VULKAN CALL vkAllocateMemory.
- VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
-
- if(res == VK_SUCCESS)
- {
+ const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
+
+ // HeapSizeLimit is in effect for this heap.
+ if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
+ {
+ const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
+ VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
+ for(;;)
+ {
+ const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
+ if(blockBytesAfterAllocation > heapSize)
+ {
+ return VK_ERROR_OUT_OF_DEVICE_MEMORY;
+ }
+ if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
+ {
+ break;
+ }
+ }
+ }
+ else
+ {
+ m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
+ }
+
+ // VULKAN CALL vkAllocateMemory.
+ VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
+
+ if(res == VK_SUCCESS)
+ {
#if VMA_MEMORY_BUDGET
- ++m_Budget.m_OperationsSinceBudgetFetch;
+ ++m_Budget.m_OperationsSinceBudgetFetch;
#endif
- // Informative callback.
- if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
- {
- (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
- }
- }
- else
- {
- m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
- }
+ // Informative callback.
+ if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
+ {
+ (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
+ }
+ }
+ else
+ {
+ m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
+ }
- return res;
+ return res;
}
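The HeapSizeLimit branch above reserves budget with a bounded atomic add: commit blockBytes + size only if it stays under the limit, otherwise fail without touching Vulkan. A standalone sketch of that primitive, with illustrative types and names:

#include <atomic>
#include <cstdint>

bool TryReserve(std::atomic<uint64_t>& blockBytes, uint64_t size, uint64_t heapLimit)
{
    uint64_t observed = blockBytes.load();
    for(;;)
    {
        if(observed + size > heapLimit)
            return false; // would exceed the limit: report out-of-memory
        if(blockBytes.compare_exchange_strong(observed, observed + size))
            return true;  // reserved; roll back if vkAllocateMemory then fails
    }
}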
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
- // Informative callback.
- if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
- {
- (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
- }
+ // Informative callback.
+ if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
+ {
+ (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
+ }
- // VULKAN CALL vkFreeMemory.
- (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
+ // VULKAN CALL vkFreeMemory.
+ (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
- m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
+ m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset,
- VkBuffer buffer,
- const void* pNext)
+ VkDeviceMemory memory,
+ VkDeviceSize memoryOffset,
+ VkBuffer buffer,
+ const void* pNext)
{
- if(pNext != VMA_NULL)
- {
+ if(pNext != VMA_NULL)
+ {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
- if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
- m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
- {
- VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
- bindBufferMemoryInfo.pNext = pNext;
- bindBufferMemoryInfo.buffer = buffer;
- bindBufferMemoryInfo.memory = memory;
- bindBufferMemoryInfo.memoryOffset = memoryOffset;
- return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
- }
- else
+ if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
+ m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
+ {
+ VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
+ bindBufferMemoryInfo.pNext = pNext;
+ bindBufferMemoryInfo.buffer = buffer;
+ bindBufferMemoryInfo.memory = memory;
+ bindBufferMemoryInfo.memoryOffset = memoryOffset;
+ return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
+ }
+ else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
- {
- return VK_ERROR_EXTENSION_NOT_PRESENT;
- }
- }
- else
- {
- return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
- }
+ {
+ return VK_ERROR_EXTENSION_NOT_PRESENT;
+ }
+ }
+ else
+ {
+ return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
+ }
}
VkResult VmaAllocator_T::BindVulkanImage(
- VkDeviceMemory memory,
- VkDeviceSize memoryOffset,
- VkImage image,
- const void* pNext)
+ VkDeviceMemory memory,
+ VkDeviceSize memoryOffset,
+ VkImage image,
+ const void* pNext)
{
- if(pNext != VMA_NULL)
- {
+ if(pNext != VMA_NULL)
+ {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
- if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
- m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
- {
- VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
- bindBufferMemoryInfo.pNext = pNext;
- bindBufferMemoryInfo.image = image;
- bindBufferMemoryInfo.memory = memory;
- bindBufferMemoryInfo.memoryOffset = memoryOffset;
- return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
- }
- else
+ if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
+ m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
+ {
+ VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
+ bindBufferMemoryInfo.pNext = pNext;
+ bindBufferMemoryInfo.image = image;
+ bindBufferMemoryInfo.memory = memory;
+ bindBufferMemoryInfo.memoryOffset = memoryOffset;
+ return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
+ }
+ else
#endif // #if VMA_BIND_MEMORY2
- {
- return VK_ERROR_EXTENSION_NOT_PRESENT;
- }
- }
- else
- {
- return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
- }
+ {
+ return VK_ERROR_EXTENSION_NOT_PRESENT;
+ }
+ }
+ else
+ {
+ return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
+ }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
- if(hAllocation->CanBecomeLost())
- {
- return VK_ERROR_MEMORY_MAP_FAILED;
- }
-
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
- char *pBytes = VMA_NULL;
- VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
- if(res == VK_SUCCESS)
- {
- *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
- hAllocation->BlockAllocMap();
- }
- return res;
- }
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- return hAllocation->DedicatedAllocMap(this, ppData);
- default:
- VMA_ASSERT(0);
- return VK_ERROR_MEMORY_MAP_FAILED;
- }
+ if(hAllocation->CanBecomeLost())
+ {
+ return VK_ERROR_MEMORY_MAP_FAILED;
+ }
+
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+ char *pBytes = VMA_NULL;
+ VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
+ if(res == VK_SUCCESS)
+ {
+ *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
+ hAllocation->BlockAllocMap();
+ }
+ return res;
+ }
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ return hAllocation->DedicatedAllocMap(this, ppData);
+ default:
+ VMA_ASSERT(0);
+ return VK_ERROR_MEMORY_MAP_FAILED;
+ }
}
void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
- hAllocation->BlockAllocUnmap();
- pBlock->Unmap(this, 1);
- }
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- hAllocation->DedicatedAllocUnmap(this);
- break;
- default:
- VMA_ASSERT(0);
- }
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+ hAllocation->BlockAllocUnmap();
+ pBlock->Unmap(this, 1);
+ }
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ hAllocation->DedicatedAllocUnmap(this);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
}
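From the caller's side the offset arithmetic above is invisible: vmaMapMemory returns the block base plus the allocation's offset, and block mappings are reference counted, so overlapping map/unmap pairs are fine. A hedged sketch, with `srcData` and `srcSize` assumed:

void* mapped = VMA_NULL;
if(vmaMapMemory(allocator, alloc, &mapped) == VK_SUCCESS)
{
    memcpy(mapped, srcData, (size_t)srcSize); // needs <cstring>
    vmaUnmapMemory(allocator, alloc);
}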
VkResult VmaAllocator_T::BindBufferMemory(
- VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkBuffer hBuffer,
- const void* pNext)
-{
- VkResult res = VK_SUCCESS;
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
- VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
- res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
- break;
- }
- default:
- VMA_ASSERT(0);
- }
- return res;
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer hBuffer,
+ const void* pNext)
+{
+ VkResult res = VK_SUCCESS;
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+ VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
+ res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+ return res;
}
VkResult VmaAllocator_T::BindImageMemory(
- VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkImage hImage,
- const void* pNext)
-{
- VkResult res = VK_SUCCESS;
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
- VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
- res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
- break;
- }
- default:
- VMA_ASSERT(0);
- }
- return res;
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage hImage,
+ const void* pNext)
+{
+ VkResult res = VK_SUCCESS;
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+ VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
+ res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+ return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
- VmaAllocation hAllocation,
- VkDeviceSize offset, VkDeviceSize size,
- VMA_CACHE_OPERATION op)
-{
- const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
- if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
- {
- const VkDeviceSize allocationSize = hAllocation->GetSize();
- VMA_ASSERT(offset <= allocationSize);
-
- const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
-
- VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
- memRange.memory = hAllocation->GetMemory();
-
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
- if(size == VK_WHOLE_SIZE)
- {
- memRange.size = allocationSize - memRange.offset;
- }
- else
- {
- VMA_ASSERT(offset + size <= allocationSize);
- memRange.size = VMA_MIN(
- VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
- allocationSize - memRange.offset);
- }
- break;
-
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- // 1. Still within this allocation.
- memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
- if(size == VK_WHOLE_SIZE)
- {
- size = allocationSize - offset;
- }
- else
- {
- VMA_ASSERT(offset + size <= allocationSize);
- }
- memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
-
- // 2. Adjust to whole block.
- const VkDeviceSize allocationOffset = hAllocation->GetOffset();
- VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
- const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
- memRange.offset += allocationOffset;
- memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
-
- break;
- }
-
- default:
- VMA_ASSERT(0);
- }
-
- switch(op)
- {
- case VMA_CACHE_FLUSH:
- (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
- break;
- case VMA_CACHE_INVALIDATE:
- (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
- break;
- default:
- VMA_ASSERT(0);
- }
- }
- // else: Just ignore this call.
+ VmaAllocation hAllocation,
+ VkDeviceSize offset, VkDeviceSize size,
+ VMA_CACHE_OPERATION op)
+{
+ const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
+ if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
+ {
+ const VkDeviceSize allocationSize = hAllocation->GetSize();
+ VMA_ASSERT(offset <= allocationSize);
+
+ const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
+
+ VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
+ memRange.memory = hAllocation->GetMemory();
+
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
+ if(size == VK_WHOLE_SIZE)
+ {
+ memRange.size = allocationSize - memRange.offset;
+ }
+ else
+ {
+ VMA_ASSERT(offset + size <= allocationSize);
+ memRange.size = VMA_MIN(
+ VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
+ allocationSize - memRange.offset);
+ }
+ break;
+
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ // 1. Still within this allocation.
+ memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
+ if(size == VK_WHOLE_SIZE)
+ {
+ size = allocationSize - offset;
+ }
+ else
+ {
+ VMA_ASSERT(offset + size <= allocationSize);
+ }
+ memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
+
+ // 2. Adjust to whole block.
+ const VkDeviceSize allocationOffset = hAllocation->GetOffset();
+ VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
+ const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
+ memRange.offset += allocationOffset;
+ memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
+
+ break;
+ }
+
+ default:
+ VMA_ASSERT(0);
+ }
+
+ switch(op)
+ {
+ case VMA_CACHE_FLUSH:
+ (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
+ break;
+ case VMA_CACHE_INVALIDATE:
+ (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+ }
+ // else: Just ignore this call.
}
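A worked example of the alignment arithmetic above, with illustrative numbers: nonCoherentAtomSize = 64, offset = 100, size = 40, allocationSize = 256.

//   memRange.offset = VmaAlignDown(100, 64)              = 64
//   memRange.size   = VmaAlignUp(40 + (100 - 64), 64)
//                   = VmaAlignUp(76, 64)                  = 128
// The flushed range [64, 192) fully covers the requested [100, 140) while
// honoring the nonCoherentAtomSize granularity the Vulkan spec requires.
// Callers reach this path through vmaFlushAllocation(allocator, alloc, 100, 40)
// or its invalidate counterpart.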
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
- VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
+ VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
- const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
- {
- VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
- AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
- VMA_ASSERT(pDedicatedAllocations);
- bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
- VMA_ASSERT(success);
- }
+ const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
+ {
+ VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+ AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
+ VMA_ASSERT(pDedicatedAllocations);
+ bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
+ VMA_ASSERT(success);
+ }
- VkDeviceMemory hMemory = allocation->GetMemory();
-
- /*
-    There is no need to call this, because the Vulkan spec allows skipping vkUnmapMemory
-    before vkFreeMemory.
+ VkDeviceMemory hMemory = allocation->GetMemory();
+
+ /*
+    There is no need to call this, because the Vulkan spec allows skipping vkUnmapMemory
+    before vkFreeMemory.
- if(allocation->GetMappedData() != VMA_NULL)
- {
- (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
- }
- */
-
- FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
+ if(allocation->GetMappedData() != VMA_NULL)
+ {
+ (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
+ }
+ */
+
+ FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
- VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
+ VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
- VkBufferCreateInfo dummyBufCreateInfo;
- VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
+ VkBufferCreateInfo dummyBufCreateInfo;
+ VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
- uint32_t memoryTypeBits = 0;
+ uint32_t memoryTypeBits = 0;
- // Create buffer.
- VkBuffer buf = VK_NULL_HANDLE;
- VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
- m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
- if(res == VK_SUCCESS)
- {
- // Query for supported memory types.
- VkMemoryRequirements memReq;
- (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
- memoryTypeBits = memReq.memoryTypeBits;
+ // Create buffer.
+ VkBuffer buf = VK_NULL_HANDLE;
+ VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
+ m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
+ if(res == VK_SUCCESS)
+ {
+ // Query for supported memory types.
+ VkMemoryRequirements memReq;
+ (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
+ memoryTypeBits = memReq.memoryTypeBits;
- // Destroy buffer.
- (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
- }
+ // Destroy buffer.
+ (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
+ }
- return memoryTypeBits;
-}
-
-uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
-{
- // Make sure memory information is already fetched.
- VMA_ASSERT(GetMemoryTypeCount() > 0);
-
- uint32_t memoryTypeBits = UINT32_MAX;
-
- if(!m_UseAmdDeviceCoherentMemory)
- {
- // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
- {
- memoryTypeBits &= ~(1u << memTypeIndex);
- }
- }
- }
-
- return memoryTypeBits;
+ return memoryTypeBits;
}
#if VMA_MEMORY_BUDGET
void VmaAllocator_T::UpdateVulkanBudget()
{
- VMA_ASSERT(m_UseExtMemoryBudget);
+ VMA_ASSERT(m_UseExtMemoryBudget);
- VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
+ VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
- VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
- memProps.pNext = &budgetProps;
+ VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
+ memProps.pNext = &budgetProps;
- GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
+ GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
- {
- VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
+ {
+ VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
- for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
- {
- m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
- m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
- m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
- }
- m_Budget.m_OperationsSinceBudgetFetch = 0;
- }
+ for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
+ {
+ m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
+ m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
+ m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
+ }
+ m_Budget.m_OperationsSinceBudgetFetch = 0;
+ }
}
#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
- if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
- !hAllocation->CanBecomeLost() &&
- (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
- {
- void* pData = VMA_NULL;
- VkResult res = Map(hAllocation, &pData);
- if(res == VK_SUCCESS)
- {
- memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
- FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
- Unmap(hAllocation);
- }
- else
- {
- VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
- }
- }
+ if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
+ !hAllocation->CanBecomeLost() &&
+ (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
+ {
+ void* pData = VMA_NULL;
+ VkResult res = Map(hAllocation, &pData);
+ if(res == VK_SUCCESS)
+ {
+ memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
+ FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
+ Unmap(hAllocation);
+ }
+ else
+ {
+ VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
+ }
+ }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
- uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
- if(memoryTypeBits == UINT32_MAX)
- {
- memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
- m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
- }
- return memoryTypeBits;
+ uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
+ if(memoryTypeBits == UINT32_MAX)
+ {
+ memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
+ m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
+ }
+ return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED
void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
- bool dedicatedAllocationsStarted = false;
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
- AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
- VMA_ASSERT(pDedicatedAllocVector);
- if(pDedicatedAllocVector->empty() == false)
- {
- if(dedicatedAllocationsStarted == false)
- {
- dedicatedAllocationsStarted = true;
- json.WriteString("DedicatedAllocations");
- json.BeginObject();
- }
-
- json.BeginString("Type ");
- json.ContinueString(memTypeIndex);
- json.EndString();
-
- json.BeginArray();
-
- for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
- {
- json.BeginObject(true);
- const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
- hAlloc->PrintParameters(json);
- json.EndObject();
- }
-
- json.EndArray();
- }
- }
- if(dedicatedAllocationsStarted)
- {
- json.EndObject();
- }
-
- {
- bool allocationsStarted = false;
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
- {
- if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
- {
- if(allocationsStarted == false)
- {
- allocationsStarted = true;
- json.WriteString("DefaultPools");
- json.BeginObject();
- }
-
- json.BeginString("Type ");
- json.ContinueString(memTypeIndex);
- json.EndString();
-
- m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
- }
- }
- if(allocationsStarted)
- {
- json.EndObject();
- }
- }
-
- // Custom pools
- {
- VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
- const size_t poolCount = m_Pools.size();
- if(poolCount > 0)
- {
- json.WriteString("Pools");
- json.BeginObject();
- for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
- {
- json.BeginString();
- json.ContinueString(m_Pools[poolIndex]->GetId());
- json.EndString();
-
- m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
- }
- json.EndObject();
- }
- }
+ bool dedicatedAllocationsStarted = false;
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+ {
+ VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
+ AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
+ VMA_ASSERT(pDedicatedAllocVector);
+ if(pDedicatedAllocVector->empty() == false)
+ {
+ if(dedicatedAllocationsStarted == false)
+ {
+ dedicatedAllocationsStarted = true;
+ json.WriteString("DedicatedAllocations");
+ json.BeginObject();
+ }
+
+ json.BeginString("Type ");
+ json.ContinueString(memTypeIndex);
+ json.EndString();
+
+ json.BeginArray();
+
+ for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
+ {
+ json.BeginObject(true);
+ const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
+ hAlloc->PrintParameters(json);
+ json.EndObject();
+ }
+
+ json.EndArray();
+ }
+ }
+ if(dedicatedAllocationsStarted)
+ {
+ json.EndObject();
+ }
+
+ {
+ bool allocationsStarted = false;
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
+ {
+ if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
+ {
+ if(allocationsStarted == false)
+ {
+ allocationsStarted = true;
+ json.WriteString("DefaultPools");
+ json.BeginObject();
+ }
+
+ json.BeginString("Type ");
+ json.ContinueString(memTypeIndex);
+ json.EndString();
+
+ m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
+ }
+ }
+ if(allocationsStarted)
+ {
+ json.EndObject();
+ }
+ }
+
+ // Custom pools
+ {
+ VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
+ const size_t poolCount = m_Pools.size();
+ if(poolCount > 0)
+ {
+ json.WriteString("Pools");
+ json.BeginObject();
+ for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
+ {
+ json.BeginString();
+ json.ContinueString(m_Pools[poolIndex]->GetId());
+ json.EndString();
+
+ m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
+ }
+ json.EndObject();
+ }
+ }
}
#endif // #if VMA_STATS_STRING_ENABLED
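// For reference, the writer above emits a JSON object with up to three
// sections ("DedicatedAllocations", "DefaultPools", "Pools"), each included
// only when non-empty; it is embedded in the output of vmaBuildStatsString
// below when detailedMap is VK_TRUE.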
@@ -16803,233 +16363,221 @@ void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Public interface
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
- const VmaAllocatorCreateInfo* pCreateInfo,
- VmaAllocator* pAllocator)
+ const VmaAllocatorCreateInfo* pCreateInfo,
+ VmaAllocator* pAllocator)
{
- VMA_ASSERT(pCreateInfo && pAllocator);
- VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 ||
- (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 1));
- VMA_DEBUG_LOG("vmaCreateAllocator");
- *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
- return (*pAllocator)->Init(pCreateInfo);
+ VMA_ASSERT(pCreateInfo && pAllocator);
+ VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 ||
+ (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 1));
+ VMA_DEBUG_LOG("vmaCreateAllocator");
+ *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
+ return (*pAllocator)->Init(pCreateInfo);
}
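// Typical call sequence for this entry point and vmaDestroyAllocator just
// below; a minimal sketch assuming physicalDevice, device, and instance are
// valid Vulkan handles (the exact set of VmaAllocatorCreateInfo fields
// depends on the VMA version in use):

VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.physicalDevice = physicalDevice;
allocatorInfo.device = device;
allocatorInfo.instance = instance;

VmaAllocator vmaAllocator = VK_NULL_HANDLE;
VkResult res = vmaCreateAllocator(&allocatorInfo, &vmaAllocator);
// ... allocate and free memory through vmaAllocator ...
vmaDestroyAllocator(vmaAllocator); // also safe on VK_NULL_HANDLE, per the check below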
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
- VmaAllocator allocator)
+ VmaAllocator allocator)
{
- if(allocator != VK_NULL_HANDLE)
- {
- VMA_DEBUG_LOG("vmaDestroyAllocator");
- VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
- vma_delete(&allocationCallbacks, allocator);
- }
+ if(allocator != VK_NULL_HANDLE)
+ {
+ VMA_DEBUG_LOG("vmaDestroyAllocator");
+ VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
+ vma_delete(&allocationCallbacks, allocator);
+ }
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
- VmaAllocator allocator,
- const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
+ VmaAllocator allocator,
+ const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
- VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
- *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
+ VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
+ *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
- VmaAllocator allocator,
- const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
+ VmaAllocator allocator,
+ const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
- VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
- *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
+ VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
+ *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
- VmaAllocator allocator,
- uint32_t memoryTypeIndex,
- VkMemoryPropertyFlags* pFlags)
+ VmaAllocator allocator,
+ uint32_t memoryTypeIndex,
+ VkMemoryPropertyFlags* pFlags)
{
- VMA_ASSERT(allocator && pFlags);
- VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
- *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
+ VMA_ASSERT(allocator && pFlags);
+ VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
+ *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
- VmaAllocator allocator,
- uint32_t frameIndex)
+ VmaAllocator allocator,
+ uint32_t frameIndex)
{
- VMA_ASSERT(allocator);
- VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
+ VMA_ASSERT(allocator);
+ VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- allocator->SetCurrentFrameIndex(frameIndex);
+ allocator->SetCurrentFrameIndex(frameIndex);
}
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
- VmaAllocator allocator,
- VmaStats* pStats)
+ VmaAllocator allocator,
+ VmaStats* pStats)
{
- VMA_ASSERT(allocator && pStats);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
- allocator->CalculateStats(pStats);
+ VMA_ASSERT(allocator && pStats);
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ allocator->CalculateStats(pStats);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
- VmaAllocator allocator,
- VmaBudget* pBudget)
+ VmaAllocator allocator,
+ VmaBudget* pBudget)
{
- VMA_ASSERT(allocator && pBudget);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
- allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
+ VMA_ASSERT(allocator && pBudget);
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
- VmaAllocator allocator,
- char** ppStatsString,
- VkBool32 detailedMap)
-{
- VMA_ASSERT(allocator && ppStatsString);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VmaStringBuilder sb(allocator);
- {
- VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
- json.BeginObject();
-
- VmaBudget budget[VK_MAX_MEMORY_HEAPS];
- allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
-
- VmaStats stats;
- allocator->CalculateStats(&stats);
-
- json.WriteString("Total");
- VmaPrintStatInfo(json, stats.total);
-
- for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
- {
- json.BeginString("Heap ");
- json.ContinueString(heapIndex);
- json.EndString();
- json.BeginObject();
-
- json.WriteString("Size");
- json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
-
- json.WriteString("Flags");
- json.BeginArray(true);
- if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
- {
- json.WriteString("DEVICE_LOCAL");
- }
- json.EndArray();
-
- json.WriteString("Budget");
- json.BeginObject();
- {
- json.WriteString("BlockBytes");
- json.WriteNumber(budget[heapIndex].blockBytes);
- json.WriteString("AllocationBytes");
- json.WriteNumber(budget[heapIndex].allocationBytes);
- json.WriteString("Usage");
- json.WriteNumber(budget[heapIndex].usage);
- json.WriteString("Budget");
- json.WriteNumber(budget[heapIndex].budget);
- }
- json.EndObject();
-
- if(stats.memoryHeap[heapIndex].blockCount > 0)
- {
- json.WriteString("Stats");
- VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
- }
-
- for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
- {
- if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
- {
- json.BeginString("Type ");
- json.ContinueString(typeIndex);
- json.EndString();
-
- json.BeginObject();
-
- json.WriteString("Flags");
- json.BeginArray(true);
- VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
- if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
- {
- json.WriteString("DEVICE_LOCAL");
- }
- if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
- {
- json.WriteString("HOST_VISIBLE");
- }
- if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
- {
- json.WriteString("HOST_COHERENT");
- }
- if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
- {
- json.WriteString("HOST_CACHED");
- }
- if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
- {
- json.WriteString("LAZILY_ALLOCATED");
- }
- if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
- {
- json.WriteString(" PROTECTED");
- }
- if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
- {
- json.WriteString(" DEVICE_COHERENT");
- }
- if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
- {
- json.WriteString(" DEVICE_UNCACHED");
- }
- json.EndArray();
-
- if(stats.memoryType[typeIndex].blockCount > 0)
- {
- json.WriteString("Stats");
- VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
- }
-
- json.EndObject();
- }
- }
-
- json.EndObject();
- }
- if(detailedMap == VK_TRUE)
- {
- allocator->PrintDetailedMap(json);
- }
-
- json.EndObject();
- }
-
- const size_t len = sb.GetLength();
- char* const pChars = vma_new_array(allocator, char, len + 1);
- if(len > 0)
- {
- memcpy(pChars, sb.GetData(), len);
- }
- pChars[len] = '\0';
- *ppStatsString = pChars;
+ VmaAllocator allocator,
+ char** ppStatsString,
+ VkBool32 detailedMap)
+{
+ VMA_ASSERT(allocator && ppStatsString);
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+ VmaStringBuilder sb(allocator);
+ {
+ VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
+ json.BeginObject();
+
+ VmaBudget budget[VK_MAX_MEMORY_HEAPS];
+ allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
+
+ VmaStats stats;
+ allocator->CalculateStats(&stats);
+
+ json.WriteString("Total");
+ VmaPrintStatInfo(json, stats.total);
+
+ for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
+ {
+ json.BeginString("Heap ");
+ json.ContinueString(heapIndex);
+ json.EndString();
+ json.BeginObject();
+
+ json.WriteString("Size");
+ json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
+
+ json.WriteString("Flags");
+ json.BeginArray(true);
+ if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
+ {
+ json.WriteString("DEVICE_LOCAL");
+ }
+ json.EndArray();
+
+ json.WriteString("Budget");
+ json.BeginObject();
+ {
+ json.WriteString("BlockBytes");
+ json.WriteNumber(budget[heapIndex].blockBytes);
+ json.WriteString("AllocationBytes");
+ json.WriteNumber(budget[heapIndex].allocationBytes);
+ json.WriteString("Usage");
+ json.WriteNumber(budget[heapIndex].usage);
+ json.WriteString("Budget");
+ json.WriteNumber(budget[heapIndex].budget);
+ }
+ json.EndObject();
+
+ if(stats.memoryHeap[heapIndex].blockCount > 0)
+ {
+ json.WriteString("Stats");
+ VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
+ }
+
+ for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
+ {
+ if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
+ {
+ json.BeginString("Type ");
+ json.ContinueString(typeIndex);
+ json.EndString();
+
+ json.BeginObject();
+
+ json.WriteString("Flags");
+ json.BeginArray(true);
+ VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
+ if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
+ {
+ json.WriteString("DEVICE_LOCAL");
+ }
+ if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
+ {
+ json.WriteString("HOST_VISIBLE");
+ }
+ if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
+ {
+ json.WriteString("HOST_COHERENT");
+ }
+ if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
+ {
+ json.WriteString("HOST_CACHED");
+ }
+ if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
+ {
+ json.WriteString("LAZILY_ALLOCATED");
+ }
+ json.EndArray();
+
+ if(stats.memoryType[typeIndex].blockCount > 0)
+ {
+ json.WriteString("Stats");
+ VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
+ }
+
+ json.EndObject();
+ }
+ }
+
+ json.EndObject();
+ }
+ if(detailedMap == VK_TRUE)
+ {
+ allocator->PrintDetailedMap(json);
+ }
+
+ json.EndObject();
+ }
+
+ const size_t len = sb.GetLength();
+ char* const pChars = vma_new_array(allocator, char, len + 1);
+ if(len > 0)
+ {
+ memcpy(pChars, sb.GetData(), len);
+ }
+ pChars[len] = '\0';
+ *ppStatsString = pChars;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
- VmaAllocator allocator,
- char* pStatsString)
+ VmaAllocator allocator,
+ char* pStatsString)
{
- if(pStatsString != VMA_NULL)
- {
- VMA_ASSERT(allocator);
- size_t len = strlen(pStatsString);
- vma_delete_array(allocator, pStatsString, len + 1);
- }
+ if(pStatsString != VMA_NULL)
+ {
+ VMA_ASSERT(allocator);
+ size_t len = strlen(pStatsString);
+ vma_delete_array(allocator, pStatsString, len + 1);
+ }
}
#endif // #if VMA_STATS_STRING_ENABLED
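// Usage sketch for the pair above (meaningful only when
// VMA_STATS_STRING_ENABLED is nonzero). The returned string is JSON and must
// be released through vmaFreeStatsString on the same allocator; printf
// assumes <cstdio>:

char* statsJson = VMA_NULL;
vmaBuildStatsString(vmaAllocator, &statsJson, VK_TRUE /* detailedMap */);
printf("%s\n", statsJson);
vmaFreeStatsString(vmaAllocator, statsJson);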
@@ -17038,166 +16586,157 @@ VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
This function is not protected by any mutex because it just reads immutable data.
*/
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
- VmaAllocator allocator,
- uint32_t memoryTypeBits,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex)
-{
- VMA_ASSERT(allocator != VK_NULL_HANDLE);
- VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
- VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
-
- memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();
-
- if(pAllocationCreateInfo->memoryTypeBits != 0)
- {
- memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
- }
-
- uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
- uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
- uint32_t notPreferredFlags = 0;
-
- // Convert usage to requiredFlags and preferredFlags.
- switch(pAllocationCreateInfo->usage)
- {
- case VMA_MEMORY_USAGE_UNKNOWN:
- break;
- case VMA_MEMORY_USAGE_GPU_ONLY:
- if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
- {
- preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
- }
- break;
- case VMA_MEMORY_USAGE_CPU_ONLY:
- requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
- break;
- case VMA_MEMORY_USAGE_CPU_TO_GPU:
- requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
- {
- preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
- }
- break;
- case VMA_MEMORY_USAGE_GPU_TO_CPU:
- requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
- preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
- break;
- case VMA_MEMORY_USAGE_CPU_COPY:
- notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
- break;
- case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
- requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
- break;
- default:
- VMA_ASSERT(0);
- break;
- }
-
- // Avoid DEVICE_COHERENT unless explicitly requested.
- if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
- (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
- {
- notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
- }
-
- *pMemoryTypeIndex = UINT32_MAX;
- uint32_t minCost = UINT32_MAX;
- for(uint32_t memTypeIndex = 0, memTypeBit = 1;
- memTypeIndex < allocator->GetMemoryTypeCount();
- ++memTypeIndex, memTypeBit <<= 1)
- {
- // This memory type is acceptable according to memoryTypeBits bitmask.
- if((memTypeBit & memoryTypeBits) != 0)
- {
- const VkMemoryPropertyFlags currFlags =
- allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
- // This memory type contains requiredFlags.
- if((requiredFlags & ~currFlags) == 0)
- {
- // Calculate cost as number of bits from preferredFlags not present in this memory type.
- uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
- VmaCountBitsSet(currFlags & notPreferredFlags);
- // Remember memory type with lowest cost.
- if(currCost < minCost)
- {
- *pMemoryTypeIndex = memTypeIndex;
- if(currCost == 0)
- {
- return VK_SUCCESS;
- }
- minCost = currCost;
- }
- }
- }
- }
- return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
+ VmaAllocator allocator,
+ uint32_t memoryTypeBits,
+ const VmaAllocationCreateInfo* pAllocationCreateInfo,
+ uint32_t* pMemoryTypeIndex)
+{
+ VMA_ASSERT(allocator != VK_NULL_HANDLE);
+ VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
+ VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
+
+ if(pAllocationCreateInfo->memoryTypeBits != 0)
+ {
+ memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
+ }
+
+ uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
+ uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
+ uint32_t notPreferredFlags = 0;
+
+ // Convert usage to requiredFlags and preferredFlags.
+ switch(pAllocationCreateInfo->usage)
+ {
+ case VMA_MEMORY_USAGE_UNKNOWN:
+ break;
+ case VMA_MEMORY_USAGE_GPU_ONLY:
+ if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+ {
+ preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ }
+ break;
+ case VMA_MEMORY_USAGE_CPU_ONLY:
+ requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ break;
+ case VMA_MEMORY_USAGE_CPU_TO_GPU:
+ requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+ {
+ preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ }
+ break;
+ case VMA_MEMORY_USAGE_GPU_TO_CPU:
+ requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+ break;
+ case VMA_MEMORY_USAGE_CPU_COPY:
+ notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ break;
+ case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
+ requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
+ break;
+ default:
+ VMA_ASSERT(0);
+ break;
+ }
+
+ *pMemoryTypeIndex = UINT32_MAX;
+ uint32_t minCost = UINT32_MAX;
+ for(uint32_t memTypeIndex = 0, memTypeBit = 1;
+ memTypeIndex < allocator->GetMemoryTypeCount();
+ ++memTypeIndex, memTypeBit <<= 1)
+ {
+ // This memory type is acceptable according to memoryTypeBits bitmask.
+ if((memTypeBit & memoryTypeBits) != 0)
+ {
+ const VkMemoryPropertyFlags currFlags =
+ allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
+ // This memory type contains requiredFlags.
+ if((requiredFlags & ~currFlags) == 0)
+ {
+ // Calculate cost as number of bits from preferredFlags not present in this memory type.
+ uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
+ VmaCountBitsSet(currFlags & notPreferredFlags);
+ // Remember memory type with lowest cost.
+ if(currCost < minCost)
+ {
+ *pMemoryTypeIndex = memTypeIndex;
+ if(currCost == 0)
+ {
+ return VK_SUCCESS;
+ }
+ minCost = currCost;
+ }
+ }
+ }
+ }
+ return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
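// Illustrative use of the cost-based search above: request a host-visible,
// preferably host-cached memory type for a readback buffer. memReq is assumed
// to be a VkMemoryRequirements filled by vkGetBufferMemoryRequirements:

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;

uint32_t memTypeIndex = UINT32_MAX;
VkResult res = vmaFindMemoryTypeIndex(
    vmaAllocator, memReq.memoryTypeBits, &allocCreateInfo, &memTypeIndex);
// res is VK_ERROR_FEATURE_NOT_PRESENT when no type satisfies requiredFlags.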
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
- VmaAllocator allocator,
- const VkBufferCreateInfo* pBufferCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex)
-{
- VMA_ASSERT(allocator != VK_NULL_HANDLE);
- VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
- VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
- VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
-
- const VkDevice hDev = allocator->m_hDevice;
- VkBuffer hBuffer = VK_NULL_HANDLE;
- VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
- hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
- if(res == VK_SUCCESS)
- {
- VkMemoryRequirements memReq = {};
- allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
- hDev, hBuffer, &memReq);
-
- res = vmaFindMemoryTypeIndex(
- allocator,
- memReq.memoryTypeBits,
- pAllocationCreateInfo,
- pMemoryTypeIndex);
-
- allocator->GetVulkanFunctions().vkDestroyBuffer(
- hDev, hBuffer, allocator->GetAllocationCallbacks());
- }
- return res;
+ VmaAllocator allocator,
+ const VkBufferCreateInfo* pBufferCreateInfo,
+ const VmaAllocationCreateInfo* pAllocationCreateInfo,
+ uint32_t* pMemoryTypeIndex)
+{
+ VMA_ASSERT(allocator != VK_NULL_HANDLE);
+ VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
+ VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
+ VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
+
+ const VkDevice hDev = allocator->m_hDevice;
+ VkBuffer hBuffer = VK_NULL_HANDLE;
+ VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
+ hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
+ if(res == VK_SUCCESS)
+ {
+ VkMemoryRequirements memReq = {};
+ allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
+ hDev, hBuffer, &memReq);
+
+ res = vmaFindMemoryTypeIndex(
+ allocator,
+ memReq.memoryTypeBits,
+ pAllocationCreateInfo,
+ pMemoryTypeIndex);
+
+ allocator->GetVulkanFunctions().vkDestroyBuffer(
+ hDev, hBuffer, allocator->GetAllocationCallbacks());
+ }
+ return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
- VmaAllocator allocator,
- const VkImageCreateInfo* pImageCreateInfo,
- const VmaAllocationCreateInfo* pAllocationCreateInfo,
- uint32_t* pMemoryTypeIndex)
-{
- VMA_ASSERT(allocator != VK_NULL_HANDLE);
- VMA_ASSERT(pImageCreateInfo != VMA_NULL);
- VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
- VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
-
- const VkDevice hDev = allocator->m_hDevice;
- VkImage hImage = VK_NULL_HANDLE;
- VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
- hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
- if(res == VK_SUCCESS)
- {
- VkMemoryRequirements memReq = {};
- allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
- hDev, hImage, &memReq);
-
- res = vmaFindMemoryTypeIndex(
- allocator,
- memReq.memoryTypeBits,
- pAllocationCreateInfo,
- pMemoryTypeIndex);
-
- allocator->GetVulkanFunctions().vkDestroyImage(
- hDev, hImage, allocator->GetAllocationCallbacks());
- }
- return res;
+ VmaAllocator allocator,
+ const VkImageCreateInfo* pImageCreateInfo,
+ const VmaAllocationCreateInfo* pAllocationCreateInfo,
+ uint32_t* pMemoryTypeIndex)
+{
+ VMA_ASSERT(allocator != VK_NULL_HANDLE);
+ VMA_ASSERT(pImageCreateInfo != VMA_NULL);
+ VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
+ VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
+
+ const VkDevice hDev = allocator->m_hDevice;
+ VkImage hImage = VK_NULL_HANDLE;
+ VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
+ hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
+ if(res == VK_SUCCESS)
+ {
+ VkMemoryRequirements memReq = {};
+ allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
+ hDev, hImage, &memReq);
+
+ res = vmaFindMemoryTypeIndex(
+ allocator,
+ memReq.memoryTypeBits,
+ pAllocationCreateInfo,
+ pMemoryTypeIndex);
+
+ allocator->GetVulkanFunctions().vkDestroyImage(
+ hDev, hImage, allocator->GetAllocationCallbacks());
+ }
+ return res;
}
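// The two wrappers above create a throwaway VkBuffer or VkImage purely to
// query its memory requirements, then destroy it again. Sketch of the buffer
// variant (the VkBufferCreateInfo values are example assumptions):

VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

uint32_t memTypeIndex = UINT32_MAX;
VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
    vmaAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);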
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
@@ -17205,1058 +16744,1021 @@ VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
const VmaPoolCreateInfo* pCreateInfo,
VmaPool* pPool)
{
- VMA_ASSERT(allocator && pCreateInfo && pPool);
-
- VMA_DEBUG_LOG("vmaCreatePool");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkResult res = allocator->CreatePool(pCreateInfo, pPool);
-
+ VMA_ASSERT(allocator && pCreateInfo && pPool);
+
+ VMA_DEBUG_LOG("vmaCreatePool");
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+ VkResult res = allocator->CreatePool(pCreateInfo, pPool);
+
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
+ }
#endif
-
- return res;
+
+ return res;
}
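// Minimal custom-pool sketch built on the function above; memTypeIndex is
// assumed to come from one of the vmaFindMemoryTypeIndex* helpers, and the
// size values are arbitrary examples:

VmaPoolCreateInfo poolCreateInfo = {};
poolCreateInfo.memoryTypeIndex = memTypeIndex;
poolCreateInfo.blockSize = 64ull * 1024 * 1024; // fixed 64 MiB blocks
poolCreateInfo.maxBlockCount = 4;               // cap the pool at 4 blocks

VmaPool pool = VK_NULL_HANDLE;
VkResult res = vmaCreatePool(vmaAllocator, &poolCreateInfo, &pool);
// Allocations target the pool via VmaAllocationCreateInfo::pool.
// vmaDestroyPool (below) releases it and tolerates VK_NULL_HANDLE.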
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
- VmaAllocator allocator,
- VmaPool pool)
-{
- VMA_ASSERT(allocator);
-
- if(pool == VK_NULL_HANDLE)
- {
- return;
- }
-
- VMA_DEBUG_LOG("vmaDestroyPool");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
+ VmaAllocator allocator,
+ VmaPool pool)
+{
+ VMA_ASSERT(allocator);
+
+ if(pool == VK_NULL_HANDLE)
+ {
+ return;
+ }
+
+ VMA_DEBUG_LOG("vmaDestroyPool");
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
+ }
#endif
- allocator->DestroyPool(pool);
+ allocator->DestroyPool(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
- VmaAllocator allocator,
- VmaPool pool,
- VmaPoolStats* pPoolStats)
+ VmaAllocator allocator,
+ VmaPool pool,
+ VmaPoolStats* pPoolStats)
{
- VMA_ASSERT(allocator && pool && pPoolStats);
+ VMA_ASSERT(allocator && pool && pPoolStats);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- allocator->GetPoolStats(pool, pPoolStats);
+ allocator->GetPoolStats(pool, pPoolStats);
}
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
- VmaAllocator allocator,
- VmaPool pool,
- size_t* pLostAllocationCount)
+ VmaAllocator allocator,
+ VmaPool pool,
+ size_t* pLostAllocationCount)
{
- VMA_ASSERT(allocator && pool);
+ VMA_ASSERT(allocator && pool);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
+ }
#endif
- allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
+ allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
- VMA_ASSERT(allocator && pool);
+ VMA_ASSERT(allocator && pool);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- VMA_DEBUG_LOG("vmaCheckPoolCorruption");
+ VMA_DEBUG_LOG("vmaCheckPoolCorruption");
- return allocator->CheckPoolCorruption(pool);
+ return allocator->CheckPoolCorruption(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
- VmaAllocator allocator,
- VmaPool pool,
- const char** ppName)
+ VmaAllocator allocator,
+ VmaPool pool,
+ const char** ppName)
{
- VMA_ASSERT(allocator && pool);
-
- VMA_DEBUG_LOG("vmaGetPoolName");
+ VMA_ASSERT(allocator && pool);
+
+ VMA_DEBUG_LOG("vmaGetPoolName");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- *ppName = pool->GetName();
+ *ppName = pool->GetName();
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
- VmaAllocator allocator,
- VmaPool pool,
- const char* pName)
+ VmaAllocator allocator,
+ VmaPool pool,
+ const char* pName)
{
- VMA_ASSERT(allocator && pool);
+ VMA_ASSERT(allocator && pool);
- VMA_DEBUG_LOG("vmaSetPoolName");
+ VMA_DEBUG_LOG("vmaSetPoolName");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- pool->SetName(pName);
+ pool->SetName(pName);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
+ }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
- VmaAllocator allocator,
- const VkMemoryRequirements* pVkMemoryRequirements,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo)
+ VmaAllocator allocator,
+ const VkMemoryRequirements* pVkMemoryRequirements,
+ const VmaAllocationCreateInfo* pCreateInfo,
+ VmaAllocation* pAllocation,
+ VmaAllocationInfo* pAllocationInfo)
{
- VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
+ VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
- VMA_DEBUG_LOG("vmaAllocateMemory");
+ VMA_DEBUG_LOG("vmaAllocateMemory");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
VkResult result = allocator->AllocateMemory(
- *pVkMemoryRequirements,
- false, // requiresDedicatedAllocation
- false, // prefersDedicatedAllocation
- VK_NULL_HANDLE, // dedicatedBuffer
- VK_NULL_HANDLE, // dedicatedImage
- *pCreateInfo,
- VMA_SUBALLOCATION_TYPE_UNKNOWN,
- 1, // allocationCount
- pAllocation);
+ *pVkMemoryRequirements,
+ false, // requiresDedicatedAllocation
+ false, // prefersDedicatedAllocation
+ VK_NULL_HANDLE, // dedicatedBuffer
+ VK_NULL_HANDLE, // dedicatedImage
+ *pCreateInfo,
+ VMA_SUBALLOCATION_TYPE_UNKNOWN,
+ 1, // allocationCount
+ pAllocation);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordAllocateMemory(
- allocator->GetCurrentFrameIndex(),
- *pVkMemoryRequirements,
- *pCreateInfo,
- *pAllocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordAllocateMemory(
+ allocator->GetCurrentFrameIndex(),
+ *pVkMemoryRequirements,
+ *pCreateInfo,
+ *pAllocation);
+ }
#endif
-
- if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
- {
- allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
- }
+
+ if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
+ {
+ allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
+ }
return result;
}
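// Sketch of the low-level path above, for memory not tied to a particular
// buffer or image. The VkMemoryRequirements values are example assumptions;
// normally they come from vkGetBufferMemoryRequirements or
// vkGetImageMemoryRequirements:

VkMemoryRequirements memReq = {};
memReq.size = 65536;
memReq.alignment = 256;
memReq.memoryTypeBits = UINT32_MAX;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

VmaAllocation allocation = VK_NULL_HANDLE;
VmaAllocationInfo allocInfo = {};
VkResult res = vmaAllocateMemory(vmaAllocator, &memReq, &allocCreateInfo, &allocation, &allocInfo);
// ... later: vmaFreeMemory (further below) tolerates VK_NULL_HANDLE.
vmaFreeMemory(vmaAllocator, allocation);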
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
- VmaAllocator allocator,
- const VkMemoryRequirements* pVkMemoryRequirements,
- const VmaAllocationCreateInfo* pCreateInfo,
- size_t allocationCount,
- VmaAllocation* pAllocations,
- VmaAllocationInfo* pAllocationInfo)
+ VmaAllocator allocator,
+ const VkMemoryRequirements* pVkMemoryRequirements,
+ const VmaAllocationCreateInfo* pCreateInfo,
+ size_t allocationCount,
+ VmaAllocation* pAllocations,
+ VmaAllocationInfo* pAllocationInfo)
{
- if(allocationCount == 0)
- {
- return VK_SUCCESS;
- }
+ if(allocationCount == 0)
+ {
+ return VK_SUCCESS;
+ }
- VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
+ VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
- VMA_DEBUG_LOG("vmaAllocateMemoryPages");
+ VMA_DEBUG_LOG("vmaAllocateMemoryPages");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
VkResult result = allocator->AllocateMemory(
- *pVkMemoryRequirements,
- false, // requiresDedicatedAllocation
- false, // prefersDedicatedAllocation
- VK_NULL_HANDLE, // dedicatedBuffer
- VK_NULL_HANDLE, // dedicatedImage
- *pCreateInfo,
- VMA_SUBALLOCATION_TYPE_UNKNOWN,
- allocationCount,
- pAllocations);
+ *pVkMemoryRequirements,
+ false, // requiresDedicatedAllocation
+ false, // prefersDedicatedAllocation
+ VK_NULL_HANDLE, // dedicatedBuffer
+ VK_NULL_HANDLE, // dedicatedImage
+ *pCreateInfo,
+ VMA_SUBALLOCATION_TYPE_UNKNOWN,
+ allocationCount,
+ pAllocations);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordAllocateMemoryPages(
- allocator->GetCurrentFrameIndex(),
- *pVkMemoryRequirements,
- *pCreateInfo,
- (uint64_t)allocationCount,
- pAllocations);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordAllocateMemoryPages(
+ allocator->GetCurrentFrameIndex(),
+ *pVkMemoryRequirements,
+ *pCreateInfo,
+ (uint64_t)allocationCount,
+ pAllocations);
+ }
#endif
-
- if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
- {
- for(size_t i = 0; i < allocationCount; ++i)
- {
- allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
- }
- }
+
+ if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
+ {
+ for(size_t i = 0; i < allocationCount; ++i)
+ {
+ allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
+ }
+ }
return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
- VmaAllocator allocator,
- VkBuffer buffer,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo)
-{
- VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
-
- VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkMemoryRequirements vkMemReq = {};
- bool requiresDedicatedAllocation = false;
- bool prefersDedicatedAllocation = false;
- allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation);
-
- VkResult result = allocator->AllocateMemory(
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- buffer, // dedicatedBuffer
- VK_NULL_HANDLE, // dedicatedImage
- *pCreateInfo,
- VMA_SUBALLOCATION_TYPE_BUFFER,
- 1, // allocationCount
- pAllocation);
+ VmaAllocator allocator,
+ VkBuffer buffer,
+ const VmaAllocationCreateInfo* pCreateInfo,
+ VmaAllocation* pAllocation,
+ VmaAllocationInfo* pAllocationInfo)
+{
+ VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
+
+ VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+ VkMemoryRequirements vkMemReq = {};
+ bool requiresDedicatedAllocation = false;
+ bool prefersDedicatedAllocation = false;
+ allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
+ requiresDedicatedAllocation,
+ prefersDedicatedAllocation);
+
+ VkResult result = allocator->AllocateMemory(
+ vkMemReq,
+ requiresDedicatedAllocation,
+ prefersDedicatedAllocation,
+ buffer, // dedicatedBuffer
+ VK_NULL_HANDLE, // dedicatedImage
+ *pCreateInfo,
+ VMA_SUBALLOCATION_TYPE_BUFFER,
+ 1, // allocationCount
+ pAllocation);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
- allocator->GetCurrentFrameIndex(),
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- *pCreateInfo,
- *pAllocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
+ allocator->GetCurrentFrameIndex(),
+ vkMemReq,
+ requiresDedicatedAllocation,
+ prefersDedicatedAllocation,
+ *pCreateInfo,
+ *pAllocation);
+ }
#endif
- if(pAllocationInfo && result == VK_SUCCESS)
- {
- allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
- }
+ if(pAllocationInfo && result == VK_SUCCESS)
+ {
+ allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
+ }
return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
- VmaAllocator allocator,
- VkImage image,
- const VmaAllocationCreateInfo* pCreateInfo,
- VmaAllocation* pAllocation,
- VmaAllocationInfo* pAllocationInfo)
-{
- VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
-
- VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- VkMemoryRequirements vkMemReq = {};
- bool requiresDedicatedAllocation = false;
- bool prefersDedicatedAllocation = false;
- allocator->GetImageMemoryRequirements(image, vkMemReq,
- requiresDedicatedAllocation, prefersDedicatedAllocation);
-
- VkResult result = allocator->AllocateMemory(
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- VK_NULL_HANDLE, // dedicatedBuffer
- image, // dedicatedImage
- *pCreateInfo,
- VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
- 1, // allocationCount
- pAllocation);
+ VmaAllocator allocator,
+ VkImage image,
+ const VmaAllocationCreateInfo* pCreateInfo,
+ VmaAllocation* pAllocation,
+ VmaAllocationInfo* pAllocationInfo)
+{
+ VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
+
+ VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+ VkMemoryRequirements vkMemReq = {};
+ bool requiresDedicatedAllocation = false;
+ bool prefersDedicatedAllocation = false;
+ allocator->GetImageMemoryRequirements(image, vkMemReq,
+ requiresDedicatedAllocation, prefersDedicatedAllocation);
+
+ VkResult result = allocator->AllocateMemory(
+ vkMemReq,
+ requiresDedicatedAllocation,
+ prefersDedicatedAllocation,
+ VK_NULL_HANDLE, // dedicatedBuffer
+ image, // dedicatedImage
+ *pCreateInfo,
+ VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
+ 1, // allocationCount
+ pAllocation);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordAllocateMemoryForImage(
- allocator->GetCurrentFrameIndex(),
- vkMemReq,
- requiresDedicatedAllocation,
- prefersDedicatedAllocation,
- *pCreateInfo,
- *pAllocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordAllocateMemoryForImage(
+ allocator->GetCurrentFrameIndex(),
+ vkMemReq,
+ requiresDedicatedAllocation,
+ prefersDedicatedAllocation,
+ *pCreateInfo,
+ *pAllocation);
+ }
#endif
- if(pAllocationInfo && result == VK_SUCCESS)
- {
- allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
- }
+ if(pAllocationInfo && result == VK_SUCCESS)
+ {
+ allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
+ }
return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
- VmaAllocator allocator,
- VmaAllocation allocation)
-{
- VMA_ASSERT(allocator);
-
- if(allocation == VK_NULL_HANDLE)
- {
- return;
- }
-
- VMA_DEBUG_LOG("vmaFreeMemory");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VmaAllocator allocator,
+ VmaAllocation allocation)
+{
+ VMA_ASSERT(allocator);
+
+ if(allocation == VK_NULL_HANDLE)
+ {
+ return;
+ }
+
+ VMA_DEBUG_LOG("vmaFreeMemory");
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordFreeMemory(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordFreeMemory(
+ allocator->GetCurrentFrameIndex(),
+ allocation);
+ }
#endif
-
- allocator->FreeMemory(
- 1, // allocationCount
- &allocation);
+
+ allocator->FreeMemory(
+ 1, // allocationCount
+ &allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
- VmaAllocator allocator,
- size_t allocationCount,
- VmaAllocation* pAllocations)
+ VmaAllocator allocator,
+ size_t allocationCount,
+ VmaAllocation* pAllocations)
{
- if(allocationCount == 0)
- {
- return;
- }
+ if(allocationCount == 0)
+ {
+ return;
+ }
- VMA_ASSERT(allocator);
-
- VMA_DEBUG_LOG("vmaFreeMemoryPages");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_ASSERT(allocator);
+
+ VMA_DEBUG_LOG("vmaFreeMemoryPages");
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordFreeMemoryPages(
- allocator->GetCurrentFrameIndex(),
- (uint64_t)allocationCount,
- pAllocations);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordFreeMemoryPages(
+ allocator->GetCurrentFrameIndex(),
+ (uint64_t)allocationCount,
+ pAllocations);
+ }
#endif
-
- allocator->FreeMemory(allocationCount, pAllocations);
+
+ allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkDeviceSize newSize)
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkDeviceSize newSize)
{
- VMA_ASSERT(allocator && allocation);
-
- VMA_DEBUG_LOG("vmaResizeAllocation");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_ASSERT(allocator && allocation);
+
+ VMA_DEBUG_LOG("vmaResizeAllocation");
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- return allocator->ResizeAllocation(allocation, newSize);
+ return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VmaAllocationInfo* pAllocationInfo)
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VmaAllocationInfo* pAllocationInfo)
{
- VMA_ASSERT(allocator && allocation && pAllocationInfo);
+ VMA_ASSERT(allocator && allocation && pAllocationInfo);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordGetAllocationInfo(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordGetAllocationInfo(
+ allocator->GetCurrentFrameIndex(),
+ allocation);
+ }
#endif
- allocator->GetAllocationInfo(allocation, pAllocationInfo);
+ allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
- VmaAllocator allocator,
- VmaAllocation allocation)
+ VmaAllocator allocator,
+ VmaAllocation allocation)
{
- VMA_ASSERT(allocator && allocation);
+ VMA_ASSERT(allocator && allocation);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordTouchAllocation(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordTouchAllocation(
+ allocator->GetCurrentFrameIndex(),
+ allocation);
+ }
#endif
- return allocator->TouchAllocation(allocation);
+ return allocator->TouchAllocation(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
- VmaAllocator allocator,
- VmaAllocation allocation,
- void* pUserData)
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ void* pUserData)
{
- VMA_ASSERT(allocator && allocation);
+ VMA_ASSERT(allocator && allocation);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- allocation->SetUserData(allocator, pUserData);
+ allocation->SetUserData(allocator, pUserData);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordSetAllocationUserData(
- allocator->GetCurrentFrameIndex(),
- allocation,
- pUserData);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordSetAllocationUserData(
+ allocator->GetCurrentFrameIndex(),
+ allocation,
+ pUserData);
+ }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
- VmaAllocator allocator,
- VmaAllocation* pAllocation)
+ VmaAllocator allocator,
+ VmaAllocation* pAllocation)
{
- VMA_ASSERT(allocator && pAllocation);
+ VMA_ASSERT(allocator && pAllocation);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK;
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK;
- allocator->CreateLostAllocation(pAllocation);
+ allocator->CreateLostAllocation(pAllocation);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordCreateLostAllocation(
- allocator->GetCurrentFrameIndex(),
- *pAllocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordCreateLostAllocation(
+ allocator->GetCurrentFrameIndex(),
+ *pAllocation);
+ }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- void** ppData)
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ void** ppData)
{
- VMA_ASSERT(allocator && allocation && ppData);
+ VMA_ASSERT(allocator && allocation && ppData);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- VkResult res = allocator->Map(allocation, ppData);
+ VkResult res = allocator->Map(allocation, ppData);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordMapMemory(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordMapMemory(
+ allocator->GetCurrentFrameIndex(),
+ allocation);
+ }
#endif
- return res;
+ return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
- VmaAllocator allocator,
- VmaAllocation allocation)
+ VmaAllocator allocator,
+ VmaAllocation allocation)
{
- VMA_ASSERT(allocator && allocation);
+ VMA_ASSERT(allocator && allocation);
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordUnmapMemory(
- allocator->GetCurrentFrameIndex(),
- allocation);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordUnmapMemory(
+ allocator->GetCurrentFrameIndex(),
+ allocation);
+ }
#endif
- allocator->Unmap(allocation);
+ allocator->Unmap(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
- VMA_ASSERT(allocator && allocation);
+ VMA_ASSERT(allocator && allocation);
- VMA_DEBUG_LOG("vmaFlushAllocation");
+ VMA_DEBUG_LOG("vmaFlushAllocation");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
+ allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordFlushAllocation(
- allocator->GetCurrentFrameIndex(),
- allocation, offset, size);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordFlushAllocation(
+ allocator->GetCurrentFrameIndex(),
+ allocation, offset, size);
+ }
#endif
}
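// Host-access sketch combining the map/unmap/flush entry points above; srcData
// and srcSize are assumed caller-provided. FlushOrInvalidateAllocation skips
// the underlying vkFlushMappedMemoryRanges call for HOST_COHERENT memory
// types, so issuing the flush unconditionally is safe:

void* mapped = VMA_NULL;
if(vmaMapMemory(vmaAllocator, allocation, &mapped) == VK_SUCCESS)
{
    memcpy(mapped, srcData, srcSize);
    vmaFlushAllocation(vmaAllocator, allocation, 0, srcSize);
    vmaUnmapMemory(vmaAllocator, allocation);
}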
VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
- VMA_ASSERT(allocator && allocation);
+ VMA_ASSERT(allocator && allocation);
- VMA_DEBUG_LOG("vmaInvalidateAllocation");
+ VMA_DEBUG_LOG("vmaInvalidateAllocation");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
+ allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordInvalidateAllocation(
- allocator->GetCurrentFrameIndex(),
- allocation, offset, size);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordInvalidateAllocation(
+ allocator->GetCurrentFrameIndex(),
+ allocation, offset, size);
+ }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
- VMA_ASSERT(allocator);
+ VMA_ASSERT(allocator);
- VMA_DEBUG_LOG("vmaCheckCorruption");
+ VMA_DEBUG_LOG("vmaCheckCorruption");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- return allocator->CheckCorruption(memoryTypeBits);
+ return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
- VmaAllocator allocator,
- VmaAllocation* pAllocations,
- size_t allocationCount,
- VkBool32* pAllocationsChanged,
- const VmaDefragmentationInfo *pDefragmentationInfo,
- VmaDefragmentationStats* pDefragmentationStats)
-{
- // Deprecated interface, reimplemented using new one.
-
- VmaDefragmentationInfo2 info2 = {};
- info2.allocationCount = (uint32_t)allocationCount;
- info2.pAllocations = pAllocations;
- info2.pAllocationsChanged = pAllocationsChanged;
- if(pDefragmentationInfo != VMA_NULL)
- {
- info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
- info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
- }
- else
- {
- info2.maxCpuAllocationsToMove = UINT32_MAX;
- info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
- }
- // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.
-
- VmaDefragmentationContext ctx;
- VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
- if(res == VK_NOT_READY)
- {
- res = vmaDefragmentationEnd( allocator, ctx);
- }
- return res;
+ VmaAllocator allocator,
+ VmaAllocation* pAllocations,
+ size_t allocationCount,
+ VkBool32* pAllocationsChanged,
+ const VmaDefragmentationInfo *pDefragmentationInfo,
+ VmaDefragmentationStats* pDefragmentationStats)
+{
+ // Deprecated interface, reimplemented using the new one.
+
+ VmaDefragmentationInfo2 info2 = {};
+ info2.allocationCount = (uint32_t)allocationCount;
+ info2.pAllocations = pAllocations;
+ info2.pAllocationsChanged = pAllocationsChanged;
+ if(pDefragmentationInfo != VMA_NULL)
+ {
+ info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
+ info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
+ }
+ else
+ {
+ info2.maxCpuAllocationsToMove = UINT32_MAX;
+ info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
+ }
+ // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.
+
+ VmaDefragmentationContext ctx;
+ VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
+ if(res == VK_NOT_READY)
+ {
+ res = vmaDefragmentationEnd(allocator, ctx);
+ }
+ return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
- VmaAllocator allocator,
- const VmaDefragmentationInfo2* pInfo,
- VmaDefragmentationStats* pStats,
- VmaDefragmentationContext *pContext)
+ VmaAllocator allocator,
+ const VmaDefragmentationInfo2* pInfo,
+ VmaDefragmentationStats* pStats,
+ VmaDefragmentationContext *pContext)
{
- VMA_ASSERT(allocator && pInfo && pContext);
+ VMA_ASSERT(allocator && pInfo && pContext);
- // Degenerate case: Nothing to defragment.
- if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
- {
- return VK_SUCCESS;
- }
+ // Degenerate case: Nothing to defragment.
+ if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
+ {
+ return VK_SUCCESS;
+ }
- VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
- VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
- VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
- VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));
+ VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
+ VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
+ VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
+ VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));
- VMA_DEBUG_LOG("vmaDefragmentationBegin");
+ VMA_DEBUG_LOG("vmaDefragmentationBegin");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
+ VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordDefragmentationBegin(
- allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordDefragmentationBegin(
+ allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
+ }
#endif
- return res;
+ return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
- VmaAllocator allocator,
- VmaDefragmentationContext context)
+ VmaAllocator allocator,
+ VmaDefragmentationContext context)
{
- VMA_ASSERT(allocator);
+ VMA_ASSERT(allocator);
- VMA_DEBUG_LOG("vmaDefragmentationEnd");
+ VMA_DEBUG_LOG("vmaDefragmentationEnd");
- if(context != VK_NULL_HANDLE)
- {
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ if(context != VK_NULL_HANDLE)
+ {
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
#if VMA_RECORDING_ENABLED
- if(allocator->GetRecorder() != VMA_NULL)
- {
- allocator->GetRecorder()->RecordDefragmentationEnd(
- allocator->GetCurrentFrameIndex(), context);
- }
+ if(allocator->GetRecorder() != VMA_NULL)
+ {
+ allocator->GetRecorder()->RecordDefragmentationEnd(
+ allocator->GetCurrentFrameIndex(), context);
+ }
#endif
- return allocator->DefragmentationEnd(context);
- }
- else
- {
- return VK_SUCCESS;
- }
-}
-
-VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
- VmaAllocator allocator,
- VmaDefragmentationContext context,
- VmaDefragmentationPassInfo* pInfo
- )
-{
- VMA_ASSERT(allocator);
- VMA_ASSERT(pInfo);
- VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));
-
- VMA_DEBUG_LOG("vmaBeginDefragmentationPass");
-
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- if(context == VK_NULL_HANDLE)
- {
- pInfo->moveCount = 0;
- return VK_SUCCESS;
- }
-
- return allocator->DefragmentationPassBegin(pInfo, context);
-}
-VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
- VmaAllocator allocator,
- VmaDefragmentationContext context)
-{
- VMA_ASSERT(allocator);
-
- VMA_DEBUG_LOG("vmaEndDefragmentationPass");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
-
- if(context == VK_NULL_HANDLE)
- return VK_SUCCESS;
-
- return allocator->DefragmentationPassEnd(context);
+ return allocator->DefragmentationEnd(context);
+ }
+ else
+ {
+ return VK_SUCCESS;
+ }
}
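// Begin/end usage of the defragmentation API above, mirroring the deprecated
// vmaDefragment wrapper (CPU-only: the GPU limits and command buffer stay
// zero; allocs/allocCount are assumed caller-provided):

VmaDefragmentationInfo2 defragInfo = {};
defragInfo.allocationCount = (uint32_t)allocCount;
defragInfo.pAllocations = allocs;
defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
VkResult res = vmaDefragmentationBegin(vmaAllocator, &defragInfo, VMA_NULL, &defragCtx);
if(res == VK_NOT_READY)
{
    // Completes the operation and releases the context, as in the wrapper.
    res = vmaDefragmentationEnd(vmaAllocator, defragCtx);
}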
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkBuffer buffer)
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkBuffer buffer)
{
- VMA_ASSERT(allocator && allocation && buffer);
+ VMA_ASSERT(allocator && allocation && buffer);
- VMA_DEBUG_LOG("vmaBindBufferMemory");
+ VMA_DEBUG_LOG("vmaBindBufferMemory");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
+ return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkDeviceSize allocationLocalOffset,
- VkBuffer buffer,
- const void* pNext)
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer buffer,
+ const void* pNext)
{
- VMA_ASSERT(allocator && allocation && buffer);
+ VMA_ASSERT(allocator && allocation && buffer);
- VMA_DEBUG_LOG("vmaBindBufferMemory2");
+ VMA_DEBUG_LOG("vmaBindBufferMemory2");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
+ return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkImage image)
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkImage image)
{
- VMA_ASSERT(allocator && allocation && image);
+ VMA_ASSERT(allocator && allocation && image);
- VMA_DEBUG_LOG("vmaBindImageMemory");
+ VMA_DEBUG_LOG("vmaBindImageMemory");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
+ return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
- VmaAllocator allocator,
- VmaAllocation allocation,
- VkDeviceSize allocationLocalOffset,
- VkImage image,
- const void* pNext)
+ VmaAllocator allocator,
+ VmaAllocation allocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage image,
+ const void* pNext)
{
- VMA_ASSERT(allocator && allocation && image);
+ VMA_ASSERT(allocator && allocation && image);
- VMA_DEBUG_LOG("vmaBindImageMemory2");
+ VMA_DEBUG_LOG("vmaBindImageMemory2");
- VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
- return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
+ return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
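// When a buffer is created manually instead of through vmaCreateBuffer
// (defined next), the allocate-for-buffer and bind entry points combine as
// follows ('buffer' is assumed to be a valid VkBuffer):

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VmaAllocation allocation = VK_NULL_HANDLE;
VkResult res = vmaAllocateMemoryForBuffer(vmaAllocator, buffer, &allocCreateInfo, &allocation, VMA_NULL);
if(res == VK_SUCCESS)
{
    res = vmaBindBufferMemory(vmaAllocator, allocation, buffer);
}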
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);
#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif
        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
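/*
Example (a minimal usage sketch, not part of the library) of the
create-allocate-bind path implemented above. The buffer size and usage flags
are illustrative; the allocator handle is assumed to exist.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    VmaAllocationInfo allocInfo;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
*/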
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
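/*
As the body above shows, vmaDestroyBuffer is equivalent to destroying the buffer
and freeing its allocation separately, and both arguments may be VK_NULL_HANDLE,
so no null check is needed at the call site. A sketch of the equivalent manual
teardown (device, buf, alloc, and allocator assumed to exist):

    vkDestroyBuffer(device, buf, nullptr); // assumes no custom allocation callbacks
    vmaFreeMemory(allocator, alloc);
*/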
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);
#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif
        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
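/*
Example (a minimal usage sketch, not part of the library) mirroring the
vmaCreateBuffer example above; the extent, format, and usage flags are
illustrative. Note how the tiling choice selects the suballocation type in the
implementation above.

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // selects VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL
    imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage img;
    VmaAllocation alloc;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr);
    // ... use the image ...
    vmaDestroyImage(allocator, img, alloc);
*/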
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif
    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
#endif // #ifdef VMA_IMPLEMENTATION